from __future__ import print_function
# -*- coding:utf-8 -*-

from sink.KafkaSink import KafkaSink

__author__ = 'gin.chen'

import sys
from pyspark.sql import SparkSession
from pyspark.streaming import StreamingContext
from pyspark.streaming.kafka import KafkaUtils
import json
import datetime
import time

# Destination Kafka endpoint: converted records are re-published here by batch_send.
KAFKA_HOST = 'hadoop6'  # broker host of the sink cluster
KAFKA_PORT = 6667  # broker port (Hortonworks/HDP default)
KAFKA_TOPIC = "data-logs-sink"  # topic that receives the normalized log records


def init_spark():
    """Create (or reuse) a SparkSession and return its SparkContext.

    Configures Kryo serialization and streaming backpressure, and lowers
    the log level to "warn" to keep driver output readable.
    """
    builder = SparkSession.builder
    builder = builder.config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    builder = builder.config("spark.streaming.backpressure.enabled", "true")
    session = builder.getOrCreate()

    context = session.sparkContext
    context.setLogLevel("warn")
    return context


if __name__ == "__main__":

    if len(sys.argv) != 3:
        print("Usage: app.py <broker_list> <topic>", file=sys.stderr)
        # sys.exit instead of the bare `exit` builtin: `exit` is injected by
        # the site module and is not guaranteed to exist (e.g. `python -S`).
        sys.exit(-1)

    sc = init_spark()

    # 2-second micro-batches.
    ssc = StreamingContext(sc, 2)

    # Checkpoint directory required by the streaming machinery for recovery.
    ssc.checkpoint("/tmp/KafkaLogConvert")

    brokers, topic = sys.argv[1:]

    # Receiver-less (direct) Kafka stream; each record is a (key, value) tuple
    # whose value is the raw message payload.
    kvs = KafkaUtils.createDirectStream(ssc, [topic], {"metadata.broker.list": brokers})

    def convert(rows):
        """Normalize raw Kafka records from one partition into the sink schema.

        Each element of *rows* is a (key, value) tuple as delivered by
        KafkaUtils.createDirectStream; *value* is expected to be a JSON
        object. Legacy top-level fields are moved into nested groups
        ('common', 'location'), 'log_time' is converted to a Unix timestamp,
        and the platform/os is derived for app_type 100. Records that fail
        to parse or convert are skipped (best-effort).

        Returns a list of converted dicts.
        """

        def _merge(record, group, field, value):
            # Move a value into the nested `group` dict, creating it if absent.
            record.setdefault(group, {})[field] = value

        result = []
        # NOTE: no `if rows:` guard — mapPartitions supplies an iterator,
        # which is always truthy, so the original guard was a no-op anyway.
        for raw in rows:
            try:
                row = json.loads(raw[1])
                # Promote legacy top-level fields into their nested groups.
                if 'version' in row:
                    _merge(row, 'common', 'version', row.pop('version'))
                if 'latitude' in row:
                    _merge(row, 'location', 'lat', float(row.pop('latitude')))
                if 'longtitude' in row:  # upstream key really is spelled this way
                    _merge(row, 'location', 'lng', float(row.pop('longtitude')))
                if 'log_time' in row:
                    # "%Y-%m-%d %H:%M:%S" in local time -> Unix timestamp
                    # (seconds; field name says Ms but value is seconds, kept as-is).
                    parsed = datetime.datetime.strptime(row.pop('log_time'), '%Y-%m-%d %H:%M:%S')
                    row['timestampMs'] = int(time.mktime(parsed.timetuple()))
                # Derive platform/os for app_type 100 clients.
                if row.get('app_type') == 100 and 'client' in row:
                    client = row['client']
                    if 'user_agent' not in client:
                        # No user agent at all -> assume an H5 (web) client.
                        _merge(row, 'common', 'platform', "H5")
                        row['common']['os'] = "UNKNOWN"
                    elif client['user_agent'] in ("iOS", "Android"):
                        # Any other user_agent value is deliberately left untouched.
                        agent = client['user_agent']
                        _merge(row, 'common', 'platform', agent)
                        row['common']['os'] = agent
                result.append(row)
            except Exception as e:
                # Best-effort: drop malformed records but keep the stream alive.
                # (Was `print(e.message)`, which raises AttributeError on
                # Python 3 and masked the original error.)
                print(e)
                continue

        return result

    def batch_send(rows):
        """Publish each converted record of this partition to the sink topic.

        One KafkaSink (producer) is created per non-empty partition and
        reused for all of its records.
        """
        # NOTE: foreachPartition passes an iterator, and `if rows:` on an
        # iterator is always truthy — the original guard never prevented a
        # KafkaSink from being opened for an empty partition. Probe for a
        # first element instead so empty partitions make no connection.
        it = iter(rows)
        try:
            first = next(it)
        except StopIteration:
            return
        kop = KafkaSink(host=KAFKA_HOST, port=KAFKA_PORT, topic=KAFKA_TOPIC)
        kop.send_message(first)
        for row in it:
            kop.send_message(row)

    def process(rdd):
        """Run the convert → batch_send pipeline over one micro-batch RDD."""
        converted = rdd.mapPartitions(convert)
        converted.foreachPartition(batch_send)


    # Apply the convert/forward pipeline to every micro-batch of the stream.
    kvs.foreachRDD(process)

    ssc.start()

    # Block the driver until the streaming job stops or fails.
    ssc.awaitTermination()
