#!/usr/bin/env python3
# !-*- encoding: utf-8 -*-


# pip3 install kafka-python -i https://pypi.douban.com/simple
# pip3 install avro-python3 -i https://pypi.douban.com/simple
import sys
import time
import json

from kafka import KafkaProducer
from kafka import KafkaConsumer
from kafka.errors import KafkaError
# AVRO
import avro.schema
import avro.io
import io
import os
import argparse



class Kafka_Producer(object):
    """
    Kafka producer wrapper: connects to the given bootstrap server(s)
    and publishes messages to a single fixed topic.
    """

    def __init__(self, bootstrap_server, topic):
        # Topic every sendData() call publishes to.
        self.kafkatopic = topic

        print("begin to connect ip->{h} topic:{t}".format(h=bootstrap_server, t=topic))

        # bootstrap_server may be one 'host:port' or a comma-separated list.
        self.producer = KafkaProducer(bootstrap_servers=bootstrap_server)

    def sendData(self, params):
        """Publish one message to self.kafkatopic.

        params: ready-to-send bytes, or any JSON-serializable object,
        which is serialized with ensure_ascii=False and UTF-8 encoded.
        Serialization failures are logged and the message is dropped.
        """
        try:
            if isinstance(params, bytes):
                params_message = params
            else:
                try:
                    params_message = json.dumps(params, ensure_ascii=False).encode('utf-8')
                except Exception as e:
                    # Bug fix: previously params_message stayed unbound here and
                    # the send below raised UnboundLocalError. Log and skip instead.
                    print("json dumps error ", e)
                    return

            self.producer.send(self.kafkatopic, value=params_message)
            # Flush per message so this tool delivers immediately.
            self.producer.flush()
        except KafkaError as e:
            print(e)


class Kafka_consumer(object):
    """
    Consumer module: consumes messages from a topic, distinguished by group id.
    """

    def __init__(self, kafkahost, kafkaport, kafkatopic, groupid):
        self.kafkaHost = kafkahost
        self.kafkaPort = kafkaport
        self.kafkatopic = kafkatopic
        self.groupid = groupid
        # Single-broker bootstrap address built from host and port.
        bootstrap = '{kafka_host}:{kafka_port}'.format(
            kafka_host=self.kafkaHost, kafka_port=self.kafkaPort)
        self.consumer = KafkaConsumer(self.kafkatopic,
                                      group_id=self.groupid,
                                      bootstrap_servers=bootstrap)

    def consume_data(self):
        """Yield messages from the underlying consumer until interrupted."""
        try:
            yield from self.consumer
        except KeyboardInterrupt as interrupt:
            print(interrupt)


class AVROUtils(object):
    """
    Helpers for (de)serializing Kafka payloads with a fixed Avro record schema.

    The record holds a string->string headers map plus a raw bytes body.
    """
    SCHEME = {
        "type": "record",
        "name": "Event",
        "fields": [
            {"name": "headers", "type": {"type": "map", "values": "string"}},
            {"name": "body", "type": "bytes"}
        ]
    }
    # Parsed schema object shared by every encode/decode call.
    scheme = avro.schema.SchemaFromJSONData(SCHEME)

    @classmethod
    def deseriallizer(cls, message_value):
        """Decode Avro binary bytes back into a dict matching SCHEME."""
        stream = io.BytesIO(message_value)
        reader = avro.io.DatumReader(cls.scheme)
        return reader.read(avro.io.BinaryDecoder(stream))

    @classmethod
    def encodelizer(cls, message_value):
        """Encode a SCHEME-shaped dict into Avro binary bytes."""
        out = io.BytesIO()
        writer = avro.io.DatumWriter(writer_schema=cls.scheme)
        writer.write(message_value, avro.io.BinaryEncoder(out))
        return out.getvalue()

    @classmethod
    def to_formatted_json(cls, data):
        """Wrap raw payload bytes in the fixed header envelope expected by SCHEME."""
        headers = {
            "byteoffset": "1234",
            "source": "/home/coremail/logs/mailclusterjson/mailcluster2020_11_19/mailcluster15_00_00.log",
            "host": "hzxs-cac-test",
            "regionip": "123123"
        }
        return {"headers": headers, "body": data}


def parse_args():
    """Parse the tool's command-line options.

    Returns a dict with keys 'topic', 'bootstrap_server' and 'file'.
    """
    parser = argparse.ArgumentParser(description="Kafka Producer Python Tools")
    parser.add_argument('-t', '--topic', default='')
    parser.add_argument('-b', '--bootstrap_server', default='192.168.200.172')
    parser.add_argument('-f', '--file', default='')
    namespace = parser.parse_args()
    return {key: getattr(namespace, key)
            for key in ('topic', 'bootstrap_server', 'file')}

if __name__ == '__main__':

    args = parse_args()
    KAFAKA_TOPIC = args['topic']
    KAFAKA_HOST = args['bootstrap_server']
    file = args['file']
    producer = Kafka_Producer(KAFAKA_HOST, KAFAKA_TOPIC)
    cnt = 0
    # NOTE(review): the condition never changes inside the loop, so a non-empty
    # -f file is re-sent forever — presumably intentional for load testing;
    # confirm. With no -f argument nothing is sent at all.
    while file != "":
        with open(file, "rb") as f:
            for line in f:
                # Wrap each raw log line in the Avro envelope and publish it.
                tmpjson = AVROUtils.to_formatted_json(line.strip())
                encode_avro_data = AVROUtils.encodelizer(tmpjson)
                producer.sendData(encode_avro_data)
                cnt += 1
                if cnt % 10 == 0:
                    print('producer: %d' % cnt)
                    time.sleep(3)  # throttle: pause 3s every 10 messages
        print("send {cnt} Msg OK".format(cnt=cnt))
# run: python3 kafka_producer.py -t test -b '192.168.200.172:9095,192.168.200.172:9096,192.168.200.172:9097' -f maillog
