from datetime import datetime, timedelta
from airflow import DAG

from airflow.operators.python import PythonOperator
from airflow.operators.empty import EmptyOperator
from hashlib import md5
from requests_html import AsyncHTMLSession

import psycopg2
import pandas as pd
import sqlalchemy
from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker

import pendulum

import requests
from bs4 import BeautifulSoup
import logging
import re
import json
import asyncio
import time

from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.action_chains import ActionChains

# ------------------------------------------------------------------------------
# Filter dictionary (placeholder section — nothing is defined here yet)
# ------------------------------------------------------------------------------

# ------------------------------------------------------------------------------
# Config with parameters
# ------------------------------------------------------------------------------
# Loaded once at DAG-file parse time. NOTE(review): the relative path means the
# scheduler/worker must run with a CWD that contains config/ — confirm.
with open('config/config.json') as f:
    config_json = json.load(f)
    # SQLAlchemy connection string for the target Postgres instance
    conn_string = config_json['db_local_string']
    # column list shared by stg.lot_data / etl.lot_data (used to build SQL below)
    lot_data_columns = config_json['lot_data_columns']
    # default query parameters for the search endpoint
    search_params = config_json['search_params']


# ------------------------------------------------------------------------------
# Search URL and SQL statements
# ------------------------------------------------------------------------------
# Search-page URL (punycode domain), pre-filtered to category 7 and region 77
# (Moscow); every other filter parameter is deliberately left empty.
url_search = 'https://xn----etbpba5admdlad.xn--p1ai/search?search=&categorie_childs%5B%5D=7&regions%5B%5D=77&trades-section=&trades-type=\
&begin-price-from=&begin-price-to=&current-price-from=&current-price-to=&begin_bid_from=&begin_bid_to=&\
end_bid_from=&end_bid_to=&debtor_type=&debtor_name=&debtor_inn=&group_org=&organizer_name=&arbitr_inn='

# Soft-delete: for every lot whose latest etl.lot_data version is not already
# 'D' but which is absent from the fresh scrape (stg.lot_data), re-insert that
# latest version with action_flg = 'D'.  {lot_data_columns} is filled from the
# config-driven column list.
q_action_delete = """
insert
    into
    etl.lot_data({lot_data_columns}, 
    full_descr_changed, 
    price_elements_changed,
    action_flg)
select
    t1.lot_id,
    t1.region,
    t1.category,
    t1.initial_price,
    t1.current_price,
    t1.deposit,
    t1.step,
    t1.full_descr,
    t1.status,
    t1.offer_price_start_dt,
    t1.application_start_dt,
    t1.application_end_dt,
    t1.period_application_start_dt,
    t1.period_application_end_dt,
    t1.trades_type,
    t1.trades_section,
    t1.trades_market,
    0 as full_descr_changed,
    0 as price_elements_changed,
    'D' as action_flg
from
    (
    select
        *,
        row_number() over(partition by lot_id
    order by
        create_dttm desc) as rn
    from
        etl.lot_data) t1
left join stg.lot_data t2 on
    t1.lot_id = t2.lot_id
where
    t1.rn = 1
    and t1.action_flg <> 'D'
    and t2.lot_id is null;
""".format(lot_data_columns = ', '.join(lot_data_columns))




# Insert brand-new lots ('I') and changed lots ('U') from stg into etl.lot_data.
# A lot counts as changed when the md5 over the concatenation of its attributes
# differs from the latest non-deleted etl version.  full_descr_changed and
# price_elements_changed flag WHICH parts changed (they drive downstream
# re-parsing); both are forced to 0 for new lots.
q_action_update_insert = """
insert into etl.lot_data({lot_data_columns},full_descr_changed, price_elements_changed, action_flg)
select
    t.{lot_data_columns_1},
    case
        when t.new_person_flg = 1 then 0
        else t.full_descr_changed
    end as full_descr_changed,
    case
        when t.new_person_flg = 1 then 0
        else t.price_elements_changed
    end as price_elements_changed,
    case
        when new_person_flg = 1 then 'I'
        else 'U'
    end as action_flg
from
    (
    select
        t1.*,
        case
            when (md5(t1.lot_id || coalesce(t1.region,
            '0') || coalesce(t1.category,
            '0') || coalesce(t1.initial_price,
            '0') || coalesce(t1.current_price,
            '0') || coalesce(t1.deposit,
            '0') || 
coalesce(t1.step,
            '0') || coalesce(t1.full_descr,
            '0') || coalesce(t1.status,
            '0') || coalesce(t1.offer_price_start_dt,
            '1995-09-01'::date) || coalesce(t1.application_start_dt,
            '1995-09-01'::date) ||
coalesce(t1.application_end_dt,
            '1995-09-01'::date) || coalesce(t1.period_application_start_dt,
            '1995-09-01'::date) || coalesce(t1.period_application_end_dt,
            '1995-09-01'::date) || coalesce(t1.trades_type,
            '0') || coalesce(t1.trades_section,
            '0')) <> 
                  md5(t2.lot_id || coalesce(t2.region,
            '0') || coalesce(t2.category,
            '0') || coalesce(t2.initial_price,
            '0') || coalesce(t2.current_price,
            '0') || coalesce(t2.deposit,
            '0') || 
coalesce(t2.step,
            '0') || coalesce(t2.full_descr,
            '0') || coalesce(t2.status,
            '0') || coalesce(t2.offer_price_start_dt,
            '1995-09-01'::date) || coalesce(t2.application_start_dt,
            '1995-09-01'::date) ||
coalesce(t2.application_end_dt,
            '1995-09-01'::date) || coalesce(t2.period_application_start_dt,
            '1995-09-01'::date) || coalesce(t2.period_application_end_dt,
            '1995-09-01'::date) || coalesce(t2.trades_type,
            '0') || coalesce(t2.trades_section,
            '0'))) then 1
            else 0
        end as updated_person_flg,
        case
            when t1.full_descr <> t2.full_descr then 1
            else 0
        end as full_descr_changed,
        case
        when md5(t1.lot_id || coalesce(t1.initial_price,'0') || coalesce(t1.current_price, '0') || coalesce(t1.deposit, '0') || coalesce(t1.step, '0')) <> 
            md5(t2.lot_id ||coalesce(t2.initial_price, '0') || coalesce(t2.current_price, '0') || coalesce(t2.deposit, '0') || coalesce(t2.step, '0')) then 1
            else 0
        end as price_elements_changed,
        case
            when t2.lot_id is null then 1
            else 0
        end as new_person_flg
    from
        stg.lot_data t1
    left join (
        select
            {lot_data_columns}
        from
            (
            select
                *,
                row_number() over(partition by lot_id
            order by
                create_dttm desc) as rn
            from
                etl.lot_data) t
        where
            rn = 1
            and action_flg <> 'D') t2 on
        t1.lot_id = t2.lot_id) t
where
    updated_person_flg = 1
    or new_person_flg = 1;
""".format(lot_data_columns = ', '.join(config_json['lot_data_columns']), 
                                   lot_data_columns_1 = ', t.'.join(config_json['lot_data_columns']))

# Insert gallery links that are not yet present in etl.lot_media,
# matched by (lot_id, media_link).
q_new_media = """
insert
    into
    etl.lot_media (lot_id, media_link, position, media_md5)
select
    t1.lot_id,
    t1.media_link,
    t1.position,
    t1.media_md5
from
    stg.lot_media t1
left join etl.lot_media t2 on
    t1.lot_id = t2.lot_id
    and t1.media_link = t2.media_link
where
    t2.lot_id is null;
"""

# Latest etl.lot_data row per lot that still lacks an etl.lot_data_info record:
# brand-new lots ('I') or updated lots whose description changed.  These rows
# feed the description-parsing step (cadastral numbers, area, address, geo).
q_new_lot_data_info = """
select
    t.id,
    t.lot_id,
    t.full_descr
from
    (
    select
        ld.id,
        ld.lot_id,
        ld.full_descr,
        row_number() over(partition by ld.lot_id
    order by
        ld.id desc) as rn
    from
        etl.lot_data ld
    left join etl.lot_data_info ldi
            using(id)
    where
        (ld.action_flg = 'I'
            and ldi.id is null)
        or (ld.action_flg = 'U'
            and ld.full_descr_changed = 1
            and ldi.id is null)) t
where
    rn = 1;
"""
# Rows that already have coordinates but no nearest-metro payload yet
# (cord_json backfill input).
q_lot_cord = """
select
    id,
    longitude,
    latitude
from
    etl.lot_data_info
where
    longitude is not null
 and cord_json is null;
"""
# Selenium Chrome driver settings (headless scraping inside a container)
chrome_options = Options()
chrome_options.add_argument('--no-sandbox')
chrome_options.add_argument('--window-size=1920,1080')
chrome_options.add_argument('--headless')
chrome_options.add_argument('--disable-dev-shm-usage')
chrome_options.add_argument('--disable-gpu')
chrome_options.add_argument("--disable-notifications")
chrome_options.add_argument("--remote-debugging-port=9222")
# BUG FIX: add_experimental_option("excludeSwitches", ...) was previously
# called twice; the second call silently overwrote the first, dropping
# "enable-automation". Pass both switches in a single call instead.
chrome_options.add_experimental_option("excludeSwitches",
                                       ["enable-automation", "disable-popup-blocking"])
chrome_options.add_experimental_option('useAutomationExtension', False)
chrome_options.add_argument('--disable-blink-features=AutomationControlled')
chrome_options.add_argument("--disable-extensions")

# Moscow timezone for the DAG schedule
LOCAL_TZ = pendulum.timezone("Europe/Moscow")
# EGRN cadastral-number pattern (e.g. 77:01:0001001:1234)
oks_mask ="[0-9]{2}:[0-9]{2}:[0-9]{7}:[0-9]*"
# Area patterns: full "<number> м²/кв.м/м2" token, and the bare numeric part.
# NOTE(review): the "." right after [1-9] matches ANY character — "\d" looks
# intended; confirm before tightening, downstream parsed data depends on it.
square_mask, square_digit_mask = "[1-9].\d*[\.,]?\d*[\s]?(?:м²|кв[\.]?[\s]?м|м2)", "[1-9].\d*[\.\,]?\d*"
# Address pattern: Moscow-region address fragment up to a house/flat number
adress_mask = "(?:г?[\.\s]+(?:москва|зеленоград|область)).+(?:кв|квартира|вл|владение|д|дом)[\s\.№]*[1-9]"
# Ownership-share pattern ("доля"/"доли")
share_mask = "дол[я|и]"
# функция для перевода площадей в чиселку
# Convert raw area strings (e.g. "45,5 м²") into unique float values.
def get_unique_square(square_list, digit_mask=r"[1-9].\d*[\.\,]?\d*"):
    """Extract the numeric token from each area string and return unique floats.

    Parameters
    ----------
    square_list : list[str]
        Raw area strings as scraped from lot descriptions.
    digit_mask : str, optional
        Regex used to pull the numeric token out of each string.  The default
        mirrors the module-level ``square_digit_mask`` pattern; passing a
        different mask generalizes the helper to other numeric formats.

    Returns
    -------
    list[float]
        De-duplicated parsed areas; order is unspecified (set-based).
    """
    if not square_list:
        return []
    parsed = []
    for raw in square_list:
        try:
            token = re.findall(pattern=digit_mask, string=raw)[0]
            parsed.append(float(token.replace(',', '.').replace(' ', '')))
        except Exception:
            # Malformed strings are skipped rather than failing the whole batch
            # (was a bare print; logging keeps it visible in Airflow task logs).
            logging.warning('Exception with square value: [%s]', raw)
    return list(set(parsed))

def search_date(x):
    """Pull the first dd.mm.yyyy date out of *x* and return it as a Timestamp.

    Raises TypeError if *x* contains no such date (re.search returns None).
    """
    raw_date = re.search("([0-9]{2}\.[0-9]{2}\.[0-9]{4})", string = x)[0]
    return pd.to_datetime(raw_date, format = '%d.%m.%Y')

def search_price(x):
    """Return the integer price found in *x*, or None when nothing parses.

    Non-breaking spaces, dots and spaces are stripped/normalized first; a
    leading '%' guard avoids matching percentages.  Any failure (non-string
    input, no match) degrades to None.
    """
    try:
        normalized = x.replace('\xa0', '').replace('.', ',').replace(' ', '')
        match = re.search('(?<!%)([0-9]{3,}),?', normalized)
        return int(match.group(1))
    except Exception:
        return None

# Helper for parsing addresses out of free-text lot descriptions.
def get_adress(x, pattern = r"(?:г?[\.\s]+(?:москва|зеленоград|область)).+(?:кв|квартира|вл|владение|д|дом)[\s\.№]*[1-9]"):
    """Return the first address-like fragment of *x* (lowercased), or None.

    Parameters
    ----------
    x : str
        Free-text description to scan.
    pattern : str, optional
        Address regex; the default mirrors the module-level ``adress_mask``.

    BUG FIX: the ``pattern`` argument was previously accepted but ignored
    (the body hard-coded ``adress_mask``); it is now actually used.
    """
    res = re.findall(pattern=pattern, string=x.lower())
    if len(res) == 0:
        return None
    else:
        return res[0]


def rebuild_params(params, search_params):
    """Return a shallow copy of *search_params* with *params* keys overriding it.

    Neither input dict is mutated.
    """
    merged = search_params.copy()
    merged.update(params)
    return merged

def get_geo_json(_geocode):
    """Geocode *_geocode* via the Yandex geocoder.

    Returns a 4-tuple ``(formatted_address, longitude, latitude, geo_object)``
    on success, and ``(None, None, None, None)`` on any HTTP or parsing
    failure (best-effort: callers treat missing geo data as acceptable).

    Improvement: the JSON payload is parsed once instead of re-calling
    ``response.json()`` for every extracted field.
    """
    try:
        params = {'apikey': auth_key,
                  'geocode': _geocode,
                  'format': 'json'}
        response = requests.get(url = url_maps, params = params)
        if response.status_code != 200:
            return None, None, None, None
        geo_object = response.json()['response']['GeoObjectCollection']['featureMember'][0]['GeoObject']
        address_formatted = geo_object['metaDataProperty']['GeocoderMetaData']['Address']['formatted']
        # 'pos' is "lon lat" separated by a space
        pos = geo_object['Point']['pos'].split(' ')
        longitude = float(pos[0])
        latitude = float(pos[1])
        return address_formatted, longitude, latitude, geo_object
    except Exception:
        # Any missing key / network hiccup degrades to "no geo data".
        return None, None, None, None

def get_cord_json(x):
    """Return the nearest-metro GeoObject for a row with coordinates.

    *x* is a mapping with 'longitude'/'latitude'; rows without a longitude
    (or any lookup failure) yield None.
    """
    if not pd.notna(x['longitude']):
        return None
    try:
        query = {'apikey': auth_key,
                 'geocode': str(x['longitude']) + ',' + str(x['latitude']),
                 'kind': 'metro',
                 'format': 'json'}
        response = requests.get(url = url_maps, params = query)
        if response.status_code != 200:
            return None
        return response.json()['response']['GeoObjectCollection']['featureMember'][0]['GeoObject']
    except Exception:
        return None


# Selenium-based scraper for a single lot page.
def get_lot_columns(lot_id, driver):
    """Open the lot page for *lot_id* and extract its attributes.

    Returns a one-row DataFrame with the lot's region, prices, dates,
    description and trade metadata; fields missing on the page stay None.
    """
    driver.get('https://xn----etbpba5admdlad.xn--p1ai/lot/' + lot_id)
    time.sleep(2)  # let the page finish rendering

    columns = ['region', 'category', 'initial_price', 'current_price', 'deposit', 'step',
               'full_descr', 'status', 'offer_price_start_dt', 'application_start_dt', 'application_end_dt',
               'period_application_start_dt', 'period_application_end_dt', 'trades_type', 'trades_section', 'trades_market']
    lot_data = dict.fromkeys(columns)

    def after_colon(raw):
        # "Регион: Москва" -> "Москва"
        return raw.strip().split(':')[1].strip()

    def date_part(raw):
        return search_date(raw.strip())

    # page label -> (target column, extractor over the attribute <p> text)
    attr_handlers = {
        'регион': ('region', after_colon),
        'начало приема ценовых предложений': ('offer_price_start_dt', date_part),
        'начало приёма заявок': ('application_start_dt', date_part),
        'конец приёма заявок': ('application_end_dt', date_part),
        'тип торгов': ('trades_type', after_colon),
        'секция торгов': ('trades_section', after_colon),
        'площадка': ('trades_market', after_colon),
    }
    attr_keys = driver.find_elements(By.CSS_SELECTOR, 'ul.lot-data__list > li > p span')
    attr_values = driver.find_elements(By.CSS_SELECTOR, 'ul.lot-data__list > li > p')
    for key_element, value_element in zip(attr_keys, attr_values):
        label = key_element.text.replace(':', '').lower()
        if label in attr_handlers:
            column, extract = attr_handlers[label]
            lot_data[column] = extract(value_element.text)

    def shared_text(bs):
        return bs.select_one('span.js-share-search').get_text(strip=True)

    # page label -> (target column, extractor over a parsed "info" <p> fragment)
    info_handlers = {
        'категория': ('category', lambda bs: bs.select_one('a').get_text(strip=True)),
        'начальная стоимость': ('initial_price', lambda bs: search_price(shared_text(bs))),
        'текущая цена': ('current_price', lambda bs: search_price(shared_text(bs))),
        'размер задатка': ('deposit', lambda bs: search_price(shared_text(bs))),
        'шаг': ('step', lambda bs: search_price(shared_text(bs))),
        'общая информация': ('full_descr', lambda bs: shared_text(bs).replace('\xa0', ' ')),
    }
    info_fragments = [element.get_attribute('outerHTML')
                      for element in driver.find_elements(By.CSS_SELECTOR, "div[id='info'] p")]
    for fragment in info_fragments:
        bs = BeautifulSoup(fragment, 'html.parser')
        label = bs.select_one('span').text.replace(':', '').lower()
        if label in info_handlers:
            column, extract = info_handlers[label]
            lot_data[column] = extract(bs)

    lot_data['lot_id'] = lot_id
    return pd.DataFrame([lot_data])



def md5_media(link):
    """Download the picture behind *link* and return the MD5 hex digest of its bytes."""
    full_url = 'https://xn--p1abd.xn----etbpba5admdlad.xn--p1ai/pictures/' + link
    payload = requests.get(full_url).content
    return md5(payload).hexdigest()


def get_lot_media(lot_id, driver):
    """Collect gallery image links for *lot_id* from the currently loaded lot page.

    Expects *driver* to already be on the lot's page (get_lot_columns navigated
    there).  Returns a DataFrame with columns lot_id / media_link / position /
    media_md5, de-duplicated by downloaded image content (MD5) and re-numbered.

    Fixes: removed the dead, unused ``url_lot`` variable and the misleading
    "no more than 10 photos" comment that no code enforced; renamed the
    HTML-snippet variable that was confusingly called ``response``.
    """
    gallery_html = driver.find_element(By.CSS_SELECTOR, "div[class='lot-card__gallery']").get_attribute('outerHTML')
    soup = BeautifulSoup(gallery_html, 'html.parser')
    gallery = json.loads(soup.select_one("div[class='lot-card__gallery']")['data-gallery'])
    media_list = [item['name'] + '.' + item['ext'] for item in gallery]
    df_media = pd.DataFrame({'lot_id': lot_id,
                             'media_link': media_list,
                             'position': range(1, len(media_list) + 1)})
    # MD5 of the downloaded image bytes lets us drop identical duplicates.
    df_media['media_md5'] = df_media.media_link.apply(md5_media)
    df_media = df_media.drop_duplicates(subset = ['lot_id', 'media_md5'])
    # Re-number positions after de-duplication so they stay contiguous.
    df_media['position'] = list(range(1, len(df_media) + 1))
    return df_media


# Search endpoint (punycode domain); the commented variant is the image host.
# url = 'https://xn--p1abd.xn----etbpba5admdlad.xn--p1ai/search'
url = 'https://xn----etbpba5admdlad.xn--p1ai/search'
# Yandex geocoder endpoint
url_maps = 'https://geocode-maps.yandex.ru/1.x/'
# NOTE(review): hard-coded API key committed to source — move it into
# config/config.json or an Airflow connection/secret backend.
auth_key = '5b502330-184d-44af-b322-72b8c1d14ef0'

def load_data_from_api_to_stg():
    """Scrape every lot card from the paginated search results into stg tables.

    Walks the search pagination with a remote Selenium driver, parses each
    lot's attributes and gallery, then truncate-and-loads stg.lot_data and
    stg.lot_media.

    Raises
    ------
    ValueError
        When no lots could be scraped at all, or none had a description.

    Fixes: ``page_cnt`` was left undefined when the page had no lots (NameError
    at the loop below); per-page drivers were never quit when an exception
    escaped (remote-session leak) — both paths now use try/finally.
    """
    remote_webdriver = 'remote_chromedriver'

    # First pass: determine how many result pages exist.
    driver = webdriver.Remote(f'{remote_webdriver}:4444/wd/hub', options=chrome_options)
    try:
        driver.get(url_search)
        driver.maximize_window()
        time.sleep(2)
        lot_items = driver.find_elements(By.CSS_SELECTOR, 'ul.search-page-cards__list > li')
        # Pagination carousel, if present
        check_pagination = driver.find_elements(By.CSS_SELECTOR, "ul.pagination>li[class = 'page-item active'],li[class = 'page-item']")
        if len(check_pagination) == 0 and len(lot_items) == 0:
            # BUG FIX: previously page_cnt stayed undefined here -> NameError.
            page_cnt = 0
            logging.info('No elements found!')
        elif len(check_pagination) == 0:
            page_cnt = 1
        else:
            # The element before the right-arrow holds the last page number.
            page_cnt = int(check_pagination[-2].text)
        logging.info('Number of pages: %s', page_cnt)
    finally:
        driver.quit()

    lots_list = []
    lots_media_list = []
    for page_num in range(1, page_cnt + 1):
        logging.info('Page_num: %s', page_num)
        page_url = url_search + '&page={}'.format(page_num)
        # Fresh driver per page keeps long scrapes from hitting stale sessions.
        driver = webdriver.Remote(f'{remote_webdriver}:4444/wd/hub', options=chrome_options)
        try:
            driver.maximize_window()
            driver.get(page_url)
            time.sleep(2)
            lot_ids = [el.text for el in driver.find_elements(By.CSS_SELECTOR, 'ul.search-page-cards__list b.text-primary')]
            for lot_id in lot_ids:
                try:
                    df_lot = get_lot_columns(lot_id, driver)
                    df_lot_media = get_lot_media(lot_id, driver)
                    lots_list.append(df_lot)
                    lots_media_list.append(df_lot_media)
                except Exception:
                    # One broken lot must not abort the whole page.
                    logging.exception('Exception with lot_id: %s', lot_id)
        finally:
            # BUG FIX: release the remote session even when a page blows up.
            driver.quit()

    # Raise the domain error early instead of pd.concat's opaque
    # "No objects to concatenate".
    if len(lots_list) == 0:
        raise ValueError('No data was loaded from API!')
    df = pd.concat(lots_list)
    df_media = pd.concat(lots_media_list)

    # Keep only lots that actually have a description.
    lot_id_valid = df[df['full_descr'].notna()].lot_id.values
    df = df[df['lot_id'].isin(lot_id_valid)].copy()
    df_media = df_media[df_media['lot_id'].isin(lot_id_valid)].copy()
    # Media rows that actually carry a link (used for logging only — see NOTE).
    df_media_cut = df_media[df_media['media_link'].notna()].copy()
    logging.info('Final table shape: %s', df.shape[0])
    logging.info('Final media table shape: %s', df_media_cut.shape[0])

    if len(df) == 0:
        raise ValueError('No data was loaded from API!')

    conn = create_engine(conn_string)
    logging.info('Start truncate option for table stg.lot_data')
    conn.execution_options(autocommit=True).execute("""TRUNCATE TABLE stg.lot_data""")
    logging.info('Truncate option is done')
    logging.info('Start loading dataframe to postgres')
    df.to_sql(name = 'lot_data', schema = 'stg', con=conn, if_exists='append', index=False)
    logging.info('Lot data is loaded to stage')
    logging.info('Start truncate option for table stg.lot_media')
    conn.execution_options(autocommit=True).execute("""TRUNCATE TABLE stg.lot_media""")
    logging.info('Truncate option is done')
    # NOTE(review): the unfiltered df_media (not df_media_cut) is loaded here,
    # matching the original behavior — confirm whether rows with a null
    # media_link are really meant to reach stg.lot_media.
    df_media.to_sql(name = 'lot_media', schema = 'stg', con=conn, if_exists='append', index=False)

def load_data_from_stg_to_etl():
    """Apply the delete / insert-update / media deltas from stg into etl tables."""
    engine = create_engine(conn_string)
    steps = (
        ("""Start inserting lots with action_flg 'D'""", q_action_delete),
        ("""Start inserting lots with action_flg 'I/U'""", q_action_update_insert),
        ("""Start inserting media""", q_new_media),
    )
    for message, query in steps:
        logging.info(message)
        engine.execute(query)

def load_data_info_to_etl():
    """Parse descriptions of new/changed lots and write enriched rows to etl.lot_data_info.

    Extracts cadastral numbers, areas, share flags and addresses from each
    lot's description, then geocodes unambiguous addresses (exactly one
    cadastral number, one area value, non-null address) and looks up the
    nearest metro for rows that got coordinates.
    """
    engine = create_engine(conn_string)
    logging.info("""Start parse info for new lots""")
    df = pd.read_sql(sql = q_new_lot_data_info, con = engine)
    logging.info("""New lots number: """ + str(df.shape[0]))
    if len(df) == 0:
        return

    # Cadastral (EGRN) numbers, de-duplicated per description
    df['oks_num'] = df['full_descr'].apply(lambda descr: list(set(re.findall(pattern=oks_mask, string=descr))))
    # Area tokens and their parsed numeric values
    df['square_list'] = df.full_descr.apply(lambda descr: re.findall(pattern=square_mask, string=descr.lower()))
    df['square'] = df.square_list.apply(get_unique_square)
    # Ownership-share flag
    df['is_share'] = df['full_descr'].apply(lambda descr: int(len(re.findall(pattern=share_mask, string=descr.lower())) > 0))
    # Address fragment (may be None)
    df['address'] = df.full_descr.apply(get_adress)

    def geo_for_row(row):
        # Geocode only unambiguous rows: one cadastral number, one area value,
        # and a non-null address.
        unambiguous = (len(row['oks_num']) == 1
                       and len(row['square']) == 1
                       and pd.notna(row['address']))
        return get_geo_json(row['address']) if unambiguous else [None, None, None, None]

    df[['address_formatted', 'longitude', 'latitude', 'address_json']] = \
        df.apply(geo_for_row, axis = 1, result_type = 'expand')
    # Nearest metro for rows that received coordinates
    df['cord_json'] = df.apply(get_cord_json, axis = 1)

    logging.info("""Start inserting info for new lots""")
    out_columns = ['id', 'lot_id', 'oks_num', 'square', 'is_share', 'address',
                   'address_formatted', 'longitude', 'latitude', 'address_json', 'cord_json']
    df[out_columns].to_sql(name = 'lot_data_info', schema = 'etl', con = engine,
                           if_exists = 'append', index = False,
                           dtype={'address_json': sqlalchemy.types.JSON,
                                  'cord_json': sqlalchemy.types.JSON})


# Daily DAG: scrape lots -> stage -> delta-apply to etl -> parse lot info.
with DAG(
        dag_id='parse_lot_api_to_db',
        start_date = datetime(year = 2024, month = 7, day = 2, hour = 4, minute = 30, tzinfo=LOCAL_TZ),
        schedule_interval='0 5 * * *',  # every day at 05:00 Moscow time
        catchup=False,
        default_args = {
           'owner':'d.kakurin',

       },
        tags=['parsing'],
) as dag:
    start_task = EmptyOperator(
        task_id="start_task"
    )

    end_task = EmptyOperator(
        task_id="end_task"
    )

    # FIX: the operator variables previously shadowed the python_callable
    # functions of the same name; distinct names keep both accessible.
    api_to_stg_task = PythonOperator(
        task_id='from_api_to_stg',
        python_callable=load_data_from_api_to_stg
    )

    stg_to_etl_task = PythonOperator(
        task_id='from_stg_to_etl',
        python_callable=load_data_from_stg_to_etl
    )

    lots_info_task = PythonOperator(
        task_id='lots_info_to_etl',
        python_callable=load_data_info_to_etl
    )

    # Dependency chain now lives inside the DAG context for clarity.
    start_task >> api_to_stg_task >> stg_to_etl_task >> lots_info_task >> end_task