import geoip2.database
import geoip2.errors
from pyspark.sql.functions import udf
from pyspark.sql.types import IntegerType, StringType, StructField, StructType
from user_agents import parse

from constant import PATH_FILTER

# Output schema for ip_info_udf: eight nullable string fields describing
# the resolved GeoIP location of an address.
_IP_FIELD_NAMES = (
    "ip",
    "location.lat",
    "location.lon",
    "country",
    "city",
    "region_name",
    "continent_name",
    "region_iso_code",
)
ip_schema = StructType(
    [StructField(name, StringType(), True) for name in _IP_FIELD_NAMES]
)


# Shared, lazily-opened GeoIP reader: the original code opened (and never
# closed) a new Reader for every row, which leaks file handles and pays the
# expensive .mmdb open cost per record instead of once per worker process.
_GEOIP_READER = None


def _get_geoip_reader():
    """Return the process-wide geoip2 Reader, opening the DB on first use."""
    global _GEOIP_READER
    if _GEOIP_READER is None:
        # TODO: use es geoip
        _GEOIP_READER = geoip2.database.Reader('../data/GeoLite2-City.mmdb')
    return _GEOIP_READER


@udf(returnType=ip_schema)
def ip_info_udf(remote, proxy):
    """Resolve an IP address (preferring `remote`, falling back to `proxy`)
    into the 8-field row described by `ip_schema`.

    Returns all-empty strings when no IP is given or the address is not in
    the GeoIP database (private/unroutable/malformed addresses).
    """
    ip = remote or proxy or ""
    empty_row = ['', '', '', '', '', '', '', '']
    if not ip:
        return empty_row
    try:
        response = _get_geoip_reader().city(ip)
    except (geoip2.errors.AddressNotFoundError, ValueError):
        # AddressNotFoundError: IP not in DB (e.g. private ranges);
        # ValueError: string is not a valid IP at all.
        return empty_row
    lat = response.location.latitude
    lon = response.location.longitude
    # Cast coordinates to str: the schema declares StringType, and PySpark
    # silently nulls out returned values whose Python type does not match.
    return [
        ip,
        None if lat is None else str(lat),
        None if lon is None else str(lon),
        response.country.iso_code,
        response.city.name,
        response.subdivisions.most_specific.name,
        response.continent.name,
        response.subdivisions.most_specific.iso_code,
    ]


# Output schema for agent_info_udf: browser / OS / device, all nullable strings.
agent_schema = StructType(
    [StructField(field, StringType(), True) for field in ("browser", "os", "equipment")]
)


# UDF whose return row matches agent_schema.
@udf(returnType=agent_schema)
def agent_info_udf(ua_string):
    """Split a raw User-Agent header into (browser, os, equipment) strings."""
    # The user_agents parser cannot handle None, so substitute "" first.
    ua = parse(ua_string if ua_string is not None else "")
    return [ua.get_browser(), ua.get_os(), ua.get_device()]

# Declare IntegerType explicitly: a bare @udf() defaults to StringType, and
# PySpark nulls out returned values whose Python type does not match the
# declared return type — so the original's 0/1 ints came back as null.
@udf(returnType=IntegerType())
def ismatch_udf(s):
    """Return 1 if `s` is a path whose extension matches PATH_FILTER, else 0.

    A match requires a non-empty string that contains a '.' and whose
    suffix satisfies str.endswith(PATH_FILTER) (str or tuple of str).
    """
    if not s or '.' not in s:
        return 0
    return 1 if s.endswith(PATH_FILTER) else 0