from datetime import datetime, timedelta, date
from airflow import DAG

from airflow.operators.python import PythonOperator
from airflow.operators.empty import EmptyOperator
import urllib.parse
import psycopg2
import pandas as pd
import sqlalchemy
from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker
from airflow.models.xcom import XCom
import time
from random import choice
import pendulum

import requests
from bs4 import BeautifulSoup
import re

from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.chrome.options import Options
import json



# All schedule timestamps in this DAG are interpreted in Moscow time.
LOCAL_TZ = pendulum.timezone("Europe/Moscow")
# ------------------------------------------------------------------------------
# Config with parameters: DB connection string, lot-data column list and
# search parameters, loaded once at DAG-parse time.
# NOTE(review): paths are relative to the scheduler's working directory —
# confirm they resolve correctly in the deployed environment.
# ------------------------------------------------------------------------------
with open('config/config.json') as f:
    config_json = json.load(f)
    conn_string = config_json['db_local_string']
    lot_data_columns = config_json['lot_data_columns']
    search_params = config_json['search_params']
# ------------------------------------------------------------------------------
# user_agents: one User-Agent string per line; a random one is picked per
# request in get_2gis_media.
# ------------------------------------------------------------------------------
with open('config/user_agent_2gis.txt') as file:
    user_agent_lines = file.read().split('\n')
# ------------------------------------------------------------------------------
# proxy: one entry per line (presumably "host:port"; used to build proxy
# URLs in get_2gis_media — verify the file format).
# ------------------------------------------------------------------------------
with open('config/proxy.txt') as file:
    proxy_file = file.read().split('\n')
# ------------------------------------------------------------------------------
# SQL
# ------------------------------------------------------------------------------

# Lots that still need a 2GIS lookup: the first (lowest-id) non-deleted
# record per lot_id that has a geocoded address ('uri' present in
# address_json) but no successful, non-empty media scrape yet in
# etl.lot_2gis_address_media.
q_lots_2gis = """
select
    ld.id,
    ld.lot_id,
    ldi.address_json
from
    (
    select
        id,
        lot_id,
        action_flg,
        row_number() over(partition by lot_id
    order by
        id) as rn
    from
        etl.lot_data) ld
join etl.lot_data_info ldi
        using(id)
where
    rn = 1
    and action_flg <> 'D'
    and (ldi.address_json->>'uri') is not null
    and ld.lot_id not in (
    select
        lot_id
    from
        etl.lot_2gis_address_media
where api_status_code = 200 and coalesce(array_length(src_array , 1), 0)>0);
"""

# Successfully scraped media links, unnested to one row per link, for lots
# that have not yet been loaded into etl.lot_media_2gis.
q_src = """
select
    id,
    lot_id,
    unnest(src_array) as media_link
from
    etl.lot_2gis_address_media
where
    api_status_code = 200
    and lot_id not in (
    select
        lot_id
    from
        etl.lot_media_2gis);
"""


# Headless Chrome configuration shared by every Selenium session in this DAG.
chrome_options = Options()
chrome_options.add_argument('--no-sandbox')
chrome_options.add_argument('--window-size=1920,1080')
chrome_options.add_argument('--headless')
chrome_options.add_argument('--disable-dev-shm-usage')
chrome_options.add_argument('--disable-gpu')
chrome_options.add_argument("--disable-notifications")
chrome_options.add_argument("--remote-debugging-port=9222")
# BUGFIX: add_experimental_option stores values in a dict keyed by option
# name, so calling it twice for "excludeSwitches" (as the original did)
# silently dropped "enable-automation". Both switches must go in one call.
chrome_options.add_experimental_option("excludeSwitches", ["enable-automation", "disable-popup-blocking"])
chrome_options.add_experimental_option('useAutomationExtension', False)



# Convert a Yandex-geocoder address JSON into the address parts 2GIS
# understands: drop country/federal-district labels, entrance/other
# components, and duplicate names, keeping the original order.
def get_house_address(address_json):
    """Return ordered, de-duplicated address components for a 2GIS search."""
    components = address_json['metaDataProperty']['GeocoderMetaData']['Address']['Components']
    parts = []
    for component in components:
        name = component['name']
        kind = component['kind']
        if kind in ('entrance', 'other'):
            continue
        if name in ['Россия', 'Центральный федеральный округ'] or name in parts:
            continue
        parts.append(name)
    return parts

def get_img_links(object_id):
    """Open the 2GIS gallery page for *object_id* in a remote headless Chrome
    and collect the ``src``/``srcset`` attributes of every ``<img>`` element.

    Returns a tuple ``(src_list, srcset_list)``. On any failure the lists
    collected so far (possibly empty) are returned instead of raising, so a
    single bad lot does not kill the whole batch.
    """
    src_list, srcset_list = [], []
    try:
        url = f'https://2gis.ru/moscow/gallery/geo/' + object_id + '/'
        print(url)
        remote_webdriver = 'remote_chromedriver'
        with webdriver.Remote(f'{remote_webdriver}:4444/wd/hub', options=chrome_options) as driver:
            driver.maximize_window()
            driver.get(url)
            time.sleep(5)  # give the JS-rendered gallery time to load its images
            for img in driver.find_elements(By.XPATH, '//img'):
                src_list.append(img.get_attribute('src'))
                srcset_list.append(img.get_attribute('srcset'))
            return src_list, srcset_list
    except Exception as exc:  # selenium raises many exception types; best-effort scrape
        # BUGFIX: the original printed an uninformative (and unprofessional)
        # message, discarding the actual error.
        print(f'get_img_links failed for object_id={object_id}: {exc!r}')
        return src_list, srcset_list

def clean_link_image_array(array):
    """Extract the plain ``https…jpg`` URL from each link in *array*.

    Empty/None entries and links containing no .jpg URL are dropped.
    """
    # Raw string fixes the invalid '\.' escape warning the original pattern
    # produced; the trailing '[\w]*' was dropped — it could always match the
    # empty string and never affected group(1).
    pattern = re.compile(r'(https.+?\.jpg)')
    cleaned = []
    for link in array:
        # `not link` also skips None (get_attribute may yield None), which
        # would have crashed the original `link != ''` check inside re.search.
        if not link:
            continue
        match = pattern.search(link)
        if match:
            cleaned.append(match.group(1))
    return cleaned

    # нужно писать сложную логику, но это реально
def get_2gis_media(house_address, lines = user_agent_lines, proxy_file = proxy_file):
    try:
        ip = choice(proxy_file)
        proxy = {
                    'http': f'http://{ip}',
                    'https': f'https://{ip}'
                }
        headers = {'User-Agent': choice(lines)}
        # составляем geo_url
        url = 'https://2gis.ru/moscow/search/'
        geo_url = url + urllib.parse.quote(house_address, safe='') + '/filters/has_photos/'
        print(geo_url)
        res = requests.get(url = geo_url, headers = headers, timeout = 30)
        print(f'Status code:{res.status_code}')
        if res.status_code == 200:
            object_id = None
            # доверяем ранжированию 2gis
            first_media_link = re.findall(pattern = r'(?:https://i[0-9].photo.2gis.com/main/geo/)[0-9/]*view', string = res.text)
            object_id = re.findall('[0-9]+/view', first_media_link[0])[0].replace('/view', '')
            time.sleep(2)
            return [object_id, res.status_code]
        else:
            return [None, res.status_code]
    except Exception:
        return [None, res.status_code]
        
def get_2gis_media_loop(house_address, lines, n_iters = 5):
    """Retry ``get_2gis_media`` up to *n_iters* times until an object id is found.

    Returns ``[object_id, status_code]`` from the first successful attempt,
    or ``[None, None]`` if every attempt failed.
    """
    final_result = [None, None]
    for _ in range(n_iters):  # index unused — retries are identical attempts
        result = get_2gis_media(house_address, lines)
        if result[0] is not None:  # idiomatic None check (was `!= None`)
            final_result = result
            print(final_result)
            break
    return final_result

def cut_link(link):
    """Strip a ``_WIDTHxHEIGHT`` thumbnail-size suffix from an image link.

    ``…/img_640x480.jpg`` -> ``…/img.jpg``; links without a size suffix are
    returned unchanged.
    """
    # Raw string fixes the invalid '\.' escape warning; an explicit no-match
    # check replaces the original broad `except Exception` around .group().
    match = re.search(pattern = r"(_[0-9]{3,4}x[0-9]{3,4})\.", string = link)
    if match is None:
        return link
    return link.replace(match.group(1), '')
    

def get_object_id_2gis():
    """Resolve 2GIS object ids and gallery image links for new lots and
    append them to ``etl.lot_2gis_address_media``.

    Raises ValueError when there are no new lots, which deliberately fails
    the Airflow task.
    """
    # BUGFIX: the original created two independent engines (read + write),
    # i.e. two connection pools that were never shared; one engine suffices.
    engine = create_engine(conn_string)
    df = pd.read_sql(sql = q_lots_2gis, con = engine)
    print('New lots cnt: ' + str(df.shape[0]))
    if df.shape[0] == 0:
        raise ValueError('There is no any new lots!')
    # Build the short, 2GIS-friendly version of each address.
    df['address_search'] = df.address_json.apply(lambda x: ', '.join(get_house_address(x)))
    df[['object_id', 'api_status_code']] = df.apply(
        lambda x: get_2gis_media_loop(x.address_search, lines = user_agent_lines),
        axis = 1, result_type = 'expand')
    print('Start getting img links...')
    # Only rows that resolved to an object id get their galleries scraped.
    df[['src_array', 'srcset_array']] = df.apply(
        lambda x: get_img_links(x['object_id']) if pd.notna(x['object_id']) else [list(), list()],
        axis = 1, result_type = 'expand')
    df['src_array_clean'] = df['src_array'].apply(lambda x: clean_link_image_array(x) if len(x) > 0 else x)
    df['srcset_array_clean'] = df['srcset_array'].apply(lambda x: clean_link_image_array(x) if len(x) > 0 else x)
    final_df = df[['id', 'lot_id', 'address_search', 'object_id', 'api_status_code',
                   'src_array_clean', 'srcset_array_clean']].copy()
    final_df = final_df.rename({'src_array_clean': 'src_array',
                                'srcset_array_clean': 'srcset_array'}, axis = 1)
    # Rows without an object id are dropped before loading.
    final_df[final_df['object_id'].notna()].to_sql(
        name = 'lot_2gis_address_media', schema = 'etl', con = engine,
        if_exists = 'append', index = False)

def load_media_2gis():
    """Deduplicate freshly scraped media links per lot, number them, and
    append the result to ``etl.lot_media_2gis``.
    """
    engine = create_engine(conn_string)  # renamed from `conn`: it is an engine
    df_src = pd.read_sql(sql = q_src, con = engine)
    # BUGFIX: with no new rows the original fell through to pd.concat([]),
    # which raises a confusing "No objects to concatenate" ValueError.
    if df_src.empty:
        print('No new media links to load.')
        return
    per_lot = []
    for lot_id in df_src.lot_id.unique():
        lot_df = df_src[df_src['lot_id'] == lot_id].copy()
        # Strip size suffixes and the scheme so identical images served at
        # different thumbnail sizes collapse into one row.
        lot_df['media_link_cut'] = lot_df['media_link'].apply(lambda x: cut_link(x).replace('https://', ''))
        lot_df = lot_df.drop_duplicates(subset = ['id', 'lot_id', 'media_link_cut'])
        lot_df['position'] = range(1, len(lot_df) + 1)
        per_lot.append(lot_df[['lot_id', 'media_link_cut', 'position']])
    df = pd.concat(per_lot)
    df = df.rename({'media_link_cut': 'media_link'}, axis = 1)
    df.to_sql(name = 'lot_media_2gis', schema = 'etl', con = engine, if_exists = 'append', index = False)


# Daily DAG: find lots needing 2GIS media, scrape object ids and image
# links, then load the cleaned links into etl.lot_media_2gis.
with DAG(
        dag_id='parse_2gis_media',
        start_date = datetime(year = 2023, month = 3, day = 30, hour = 0, minute = 0, tzinfo=LOCAL_TZ),
        schedule_interval='0 0 * * *',
        catchup=False,
        default_args = {
           'owner':'d.kakurin',
       },
        tags=['parsing'],
) as dag:
    start_task = EmptyOperator(
        task_id="start_task"
    )

    # BUGFIX: the original bound the operators to the names of their own
    # python_callable functions (`get_object_id_2gis`, `load_media_2gis`),
    # shadowing the functions at module level. Distinct task-variable names
    # avoid that; task_ids are unchanged, so the DAG is identical in Airflow.
    get_object_id_task = PythonOperator(
        task_id='get_object_id_API',
        python_callable=get_object_id_2gis
    )

    load_media_task = PythonOperator(
        task_id='load_media_2gis_to_etl',
        python_callable=load_media_2gis
    )

    end_task = EmptyOperator(
        task_id="end_task"
    )

    # Dependency chain lives inside the `with DAG` block where it belongs.
    start_task >> get_object_id_task >> load_media_task >> end_task