from datetime import datetime, timedelta
from airflow import DAG

from airflow.operators.python import PythonOperator
from airflow.operators.empty import EmptyOperator

import psycopg2
import pandas as pd
import sqlalchemy
from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker
from random import choice
import pendulum

import requests
from bs4 import BeautifulSoup
import logging
import re
import json

import os
from PIL import Image
import io

# ------------------------------------------------------------------------------
# Dictionary for the filter
# NOTE(review): nothing is defined under this header — looks like a leftover;
# confirm before removing.
# ------------------------------------------------------------------------------

# ------------------------------------------------------------------------------
# Config with parameters
# ------------------------------------------------------------------------------
with open('config/config.json', encoding='utf-8') as f:
    config_json = json.load(f)
    conn_string = config_json['db_local_string']
    lot_data_columns = config_json['lot_data_columns']
    search_params = config_json['search_params']

# Base URL prefixes used to build absolute image links for each source.
link_media_mask = 'https://xn--p1abd.xn----etbpba5admdlad.xn--p1ai/pictures/'
link_media_mask_2gis = 'https://'
# ------------------------------------------------------------------------------
# user_agents
# ------------------------------------------------------------------------------
with open('config/user_agent_2gis.txt', encoding='utf-8') as file:
    # splitlines() + filter instead of split('\n'): the original left a
    # trailing empty string when the file ends with a newline, which
    # choice(user_agent_lines) could then pick as an empty User-Agent header.
    user_agent_lines = [ua for ua in file.read().splitlines() if ua.strip()]
# ------------------------------------------------------------------------------
# SQL
# ------------------------------------------------------------------------------
# Select media links that have not been downloaded yet (loaded_flg = 0).
# loaded_cnt is the per-lot count of already-downloaded files, computed with
# a window sum so callers can see download progress for each lot.
# {table_name} is filled in by the caller (etl.lot_media / etl.lot_media_2gis).
q_new_media = """
select
    lot_id,
    media_link,
    loaded_cnt
from
    (
    select
        lot_id,
        media_link,
        loaded_flg,
        sum(loaded_flg) over(partition by lot_id) as loaded_cnt
    from
        etl.{table_name}) s
where
    loaded_flg = 0;
"""


# Template for writing image metadata (pixel size + format) back to the ETL
# table and marking the row as loaded. All placeholders are filled via
# str.format by the caller.
q_update = """
update
    etl.{table_name}
set
    width = {width},
    height = {height},
    type = '{type}',
    loaded_flg = 1
where
    lot_id = '{lot_id}'
    and media_link = '{link}';
"""


# All DAG scheduling below is expressed in Moscow local time.
LOCAL_TZ = pendulum.timezone("Europe/Moscow")


def load_media_to_local():
    """Download pending torgi lot images and store their metadata.

    Reads rows with loaded_flg = 0 from etl.lot_media, downloads each image
    into memory to measure it with PIL, then writes width/height/format back
    and marks the row as loaded. Failures for one lot are logged and do not
    stop processing of the remaining lots.
    """
    engine = create_engine(conn_string)
    df_media = pd.read_sql(q_new_media.format(table_name='lot_media'), con=engine)
    logging.info('New media files cnt: %s', df_media.shape[0])
    logging.info('Start downloading images to memory')

    df_list = []
    for lot_id in df_media.lot_id.unique():
        try:
            link_list = df_media[df_media['lot_id'] == lot_id].media_link.values
            for link in link_list:
                media_link = link_media_mask + link
                # Timeout added: the original call could hang the task forever;
                # raise_for_status surfaces HTTP errors instead of feeding an
                # error page to PIL.
                response = requests.get(media_link, timeout=20)
                response.raise_for_status()
                img = Image.open(io.BytesIO(response.content))
                img_width, img_height = img.size
                df_list.append(pd.DataFrame({'lot_id': lot_id,
                                             'media_link': [link],
                                             'width': [img_width],
                                             'height': [img_height],
                                             'type': [img.format]}))
        except Exception:
            # logging.exception keeps the traceback in the Airflow task log
            # (the original bare print() discarded it).
            logging.exception('Exception lot_id: %s', lot_id)

    if not df_list:
        logging.info('load_media_to_local final df length = 0!')
        return

    # Bound parameters instead of str.format: a link or lot_id containing a
    # quote would otherwise break (or inject into) the statement.
    update_sql = text(
        'update etl.lot_media '
        'set width = :width, height = :height, type = :type, loaded_flg = 1 '
        'where lot_id = :lot_id and media_link = :link'
    )
    with engine.begin() as conn:
        for row in pd.concat(df_list).itertuples(index=False):
            conn.execute(update_sql, {'lot_id': row.lot_id,
                                      'link': row.media_link,
                                      'width': int(row.width),
                                      'height': int(row.height),
                                      'type': row.type})

def load_media_to_local_2gis():
    """Download pending 2gis lot images and store their metadata.

    Same flow as load_media_to_local but against etl.lot_media_2gis and with
    a randomized User-Agent header per request (2gis blocks default clients).
    Failures for one lot are logged and do not stop the remaining lots.
    """
    engine = create_engine(conn_string)
    df_media = pd.read_sql(q_new_media.format(table_name='lot_media_2gis'), con=engine)
    logging.info('New media files cnt: %s', df_media.shape[0])
    logging.info('Start downloading images to memory')

    df_list = []
    for lot_id in df_media.lot_id.unique():
        try:
            link_list = df_media[df_media['lot_id'] == lot_id].media_link.values
            for link in link_list:
                media_link = link_media_mask_2gis + link
                headers = {'User-Agent': choice(user_agent_lines)}
                response = requests.get(media_link, headers=headers, timeout=20)
                # Surface HTTP errors instead of feeding an error page to PIL.
                response.raise_for_status()
                img = Image.open(io.BytesIO(response.content))
                img_width, img_height = img.size
                df_list.append(pd.DataFrame({'lot_id': lot_id,
                                             'media_link': [link],
                                             'width': [img_width],
                                             'height': [img_height],
                                             'type': [img.format]}))
        except Exception:
            # logging.exception keeps the traceback in the Airflow task log
            # (the original bare print() discarded it).
            logging.exception('Exception lot_id: %s', lot_id)

    if not df_list:
        logging.info('load_media_to_local_2gis final df length = 0!')
        return

    # Bound parameters instead of str.format: a link or lot_id containing a
    # quote would otherwise break (or inject into) the statement.
    update_sql = text(
        'update etl.lot_media_2gis '
        'set width = :width, height = :height, type = :type, loaded_flg = 1 '
        'where lot_id = :lot_id and media_link = :link'
    )
    with engine.begin() as conn:
        for row in pd.concat(df_list).itertuples(index=False):
            logging.info('Updating row: %s %s', row.lot_id, row.media_link)
            conn.execute(update_sql, {'lot_id': row.lot_id,
                                      'link': row.media_link,
                                      'width': int(row.width),
                                      'height': int(row.height),
                                      'type': row.type})

# Daily DAG: download new lot images from both sources and record metadata.
# Runs at 02:30 Moscow time; catchup disabled so missed runs are not replayed.
with DAG(
        dag_id='download_images_daily',
        start_date=datetime(year=2023, month=3, day=30, hour=1, minute=0, tzinfo=LOCAL_TZ),
        schedule_interval='30 2 * * *',
        catchup=False,
        default_args={
            'owner': 'd.kakurin',
        },
        tags=['parsing'],
) as dag:
    start_task = EmptyOperator(
        task_id="start_task"
    )

    end_task = EmptyOperator(
        task_id="end_task"
    )

    # Distinct variable names for the operators: the original rebound the
    # callables' own names (load_media_to_local = PythonOperator(...)),
    # shadowing the functions at module level — it only worked because
    # python_callable was captured before the rebinding.
    download_images_task = PythonOperator(
        task_id='download_images',
        python_callable=load_media_to_local
    )

    download_images_2gis_task = PythonOperator(
        task_id='download_images_2gis',
        python_callable=load_media_to_local_2gis
    )

    start_task >> download_images_task >> download_images_2gis_task >> end_task