from datetime import datetime, timedelta, date
from airflow import DAG

from airflow.operators.python import PythonOperator
from airflow.operators.empty import EmptyOperator
import urllib.parse
import psycopg2
import pandas as pd
import sqlalchemy
from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker
from airflow.models.xcom import XCom
import time
from random import choice
import pendulum

import requests
from bs4 import BeautifulSoup
import re

from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.chrome.options import Options
import json



# Local timezone used for the DAG's start_date below.
LOCAL_TZ = pendulum.timezone("Europe/Moscow")
# NOTE(review): hard-coded Yandex Geocoder API key committed to source —
# consider moving it into config.json or an Airflow Variable/Connection.
auth_key = 'd1f0d47a-fc59-4b56-b5fe-695adddb265c'
url_maps = 'https://geocode-maps.yandex.ru/1.x/'
# ------------------------------------------------------------------------------
# Config with parameters
# ------------------------------------------------------------------------------
# Paths are relative to the process working directory, so this module assumes
# it is imported with the project root as CWD.
with open('config/config.json') as f:
    config_json = json.load(f)
    conn_string = config_json['db_local_string']  # SQLAlchemy URL for the local DB
# ------------------------------------------------------------------------------
# user_agents
# ------------------------------------------------------------------------------
# One user-agent string per line; not referenced in this chunk — presumably
# used elsewhere in the parsing DAGs (verify before removing).
with open('config/user_agent_2gis.txt') as file:
    user_agent_lines = file.read().split('\n')


def get_geo_json(_geocode):
    """Geocode an address string via the Yandex Geocoder HTTP API.

    Parameters
    ----------
    _geocode : str
        Free-form address (or "lon,lat" pair) to geocode.

    Returns
    -------
    list
        ``[formatted_address, geo_object]`` where ``geo_object`` is the raw
        GeoObject dict from the API response, or ``[None, None]`` when the
        request fails or the API returns no matches.
    """
    params = {'apikey': auth_key,
              'geocode': _geocode,
              'format': 'json'}
    # Bounded timeout so a stalled connection cannot hang the task forever.
    response = requests.get(url=url_maps, params=params, timeout=30)
    if response.status_code != 200:
        return [None, None]
    # Parse the body once instead of re-parsing it for every field access.
    members = response.json()['response']['GeoObjectCollection']['featureMember']
    if not members:
        # HTTP 200 but no geocoding match for this address.
        return [None, None]
    geo_object = members[0]['GeoObject']
    address_formatted = geo_object['metaDataProperty']['GeocoderMetaData']['Address']['formatted']
    return [address_formatted, geo_object]


def get_objects_to_db():
    """Scrape http://mosopen.ru/ for Moscow streets/houses and load into the DB.

    Pipeline:
      1. Read the street index page grouped by district.
      2. For every district page collect its streets (deduplicated, since a
         street can belong to several districts at once).
      3. For every street page collect its houses.
      4. For every house page collect district/area/metro attributes.
      5. Replace the ``etl.mos_objects`` table with the result.

    Returns None; the only durable side effect is the database write.
    Per-item scrape failures are logged to stdout and skipped.
    """
    # --- 1. districts -------------------------------------------------------
    url = 'http://mosopen.ru/streets'
    response = requests.get(url=url)
    soup = BeautifulSoup(response.text, 'html.parser')
    district_list = []
    # The second <table> on the page is the district index; each district name
    # sits in a <strong> wrapping a link to the district page.
    for tr in soup.findAll('table')[1].findAll('tr'):
        for strong in tr.findAll('strong'):
            district_list.append(pd.DataFrame([{'district': strong.text,
                                                'district_name': strong.select('a')[0]['title'],
                                                'district_url': strong.select('a')[0]['href']}]))
    district_df = pd.concat(district_list)

    # --- 2. streets for each district ---------------------------------------
    district_street_list = []
    for district in district_df.district:
        df = district_df[district_df['district'] == district].copy()
        district_url = df['district_url'].values[0]
        response = requests.get(url=district_url)
        soup = BeautifulSoup(response.text, 'html.parser')
        streets = soup.select("div.double_part > ul > li")
        for street in streets:
            district_street_list.append(pd.DataFrame([{
                'street': street.text,
                'street_url': street.select('a')[0]['href']}]))
    # Drop duplicates: one street may lie in several districts at once.
    street_df = pd.concat(district_street_list).drop_duplicates()

    # --- 3. houses for each street ------------------------------------------
    house_list = []
    for street in street_df.street:
        street_url = street_df[street_df['street'] == street].street_url.values[0]
        response = requests.get(url=street_url)
        soup = BeautifulSoup(response.text, 'html.parser')
        p_list = soup.find_all('p')
        # The 4th <p> holds the house list when the street has one; the marker
        # text is the site's Russian heading for "List of houses and buildings".
        if 'Список домов и строений' in p_list[3].text:
            try:
                for p in p_list[3].findAll('a'):
                    # Skip the "show all" expander link.
                    if 'показать' not in p.text:
                        house_list.append(pd.DataFrame([{'street': street,
                                                         'house': p.text,
                                                         'house_url': p['href']}]))
            except Exception:
                print('Exception with street: ' + street)
    house_df = pd.concat(house_list)
    print('Final objects cnt: ' + str(len(house_df)))

    # --- 4. attributes for each house ---------------------------------------
    df_list = []
    for house_url in house_df.house_url.unique():
        try:
            df = house_df[house_df['house_url'] == house_url].copy()
            response = requests.get(url=house_url)
            soup = BeautifulSoup(response.text, 'html.parser')
            house_attr = soup.select("div[class='contact'] > dl")
            attr_dict = {'street': None, 'district': None, 'area': None}
            for attr in house_attr:
                # dt text ends with a colon — strip it before comparing the
                # Russian labels "улица"/"район"/"округ" (street/district/area).
                label = attr.find('dt').text[:-1].lower()
                if label == 'улица':
                    attr_dict['street'] = attr.find('dd').text
                elif label == 'район':
                    attr_dict['district'] = attr.find('dd').text
                elif label == 'округ':
                    attr_dict['area'] = attr.find('dd').text

            # Metro section.
            # NOTE(review): ``attr`` here is the *last* <dl> left over from the
            # loop above — on these pages that appears to be the metro block.
            # If ``house_attr`` is empty this raises NameError, which the outer
            # except swallows; confirm this is intended.
            metro_station_list = [x.text for x in attr.find_all('a')]
            metro_distance_list = [x.text[1:-1].replace('\xa0', ' ')
                                   for x in attr.find_all('span')
                                   if x.text not in metro_station_list]
            station_dict = dict(zip(metro_station_list, metro_distance_list))

            df['district'] = attr_dict['district']
            df['area'] = attr_dict['area']
            df['station_dict'] = json.dumps(station_dict)
            if attr_dict['street'] is not None:
                # House pages sometimes carry a cleaner street name than the index.
                df['street'] = attr_dict['street']
            df_list.append(df)
            if len(df_list) % 10000 == 0:  # progress heartbeat
                print(len(df_list))
        except Exception:
            print('Exception with house url:' + house_url)

    base_df = pd.concat(df_list)[['area', 'district', 'street', 'house', 'station_dict', 'house_url']].copy()

    # --- 5. load into the DB ------------------------------------------------
    # Full replace of etl.mos_objects; station_dict is stored as native JSON.
    conn = create_engine(conn_string)
    base_df.to_sql(name='mos_objects', schema='etl', con=conn, if_exists='replace',
                   index=False, dtype={"station_dict": sqlalchemy.types.JSON})


# ------------------------------------------------------------------------------
# DAG definition: manual-trigger-only DAG that runs the scraper once.
# ------------------------------------------------------------------------------
with DAG(
        dag_id='parse_mos_objects',
        start_date=datetime(year=2023, month=5, day=7, hour=0, minute=0, tzinfo=LOCAL_TZ),
        schedule_interval=None,  # manual trigger only
        catchup=False,
        default_args={
            'owner': 'd.kakurin',
        },
        tags=['parsing'],
) as dag:
    start_task = EmptyOperator(
        task_id="start_task"
    )

    # Task variable renamed (was ``get_objects_to_db``) so it no longer
    # shadows the callable of the same name defined above; task_id unchanged.
    get_objects_task = PythonOperator(
        task_id='get_object',
        python_callable=get_objects_to_db
    )

    end_task = EmptyOperator(
        task_id="end_task"
    )

    # Dependencies wired inside the DAG context for clarity.
    start_task >> get_objects_task >> end_task