import io
import avro.schema
from avro.io import DatumWriter
from kafka import KafkaProducer, future
from kafka import KafkaConsumer, KafkaProducer
from kafka import KafkaClient
import json
from func_timeout import func_set_timeout


class KProducer:
    """Thin wrapper around kafka-python's KafkaProducer that JSON-encodes payloads."""

    def __init__(self, bootstrap_servers, topic):
        """
        Kafka producer.

        :param bootstrap_servers: broker address(es), e.g. "host:9092"
        :param topic: topic every send targets
        """
        self.producer = KafkaProducer(
            bootstrap_servers=bootstrap_servers,
            # JSON-serialize every payload before it goes on the wire.
            value_serializer=lambda m: json.dumps(m).encode('ascii'), )
        self.topic = topic

    def sync_producer(self, data_li: list):
        """
        Send each item synchronously, blocking until the broker acknowledges it.

        :param data_li: list of JSON-serializable payloads
        :return: None
        """
        for data in data_li:
            future = self.producer.send(self.topic, data)
            record_metadata = future.get(timeout=10)  # block up to 10s for the ack
            partition = record_metadata.partition  # partition the record landed in
            offset = record_metadata.offset  # offset within that partition
            print('save success, partition: {}, offset: {}'.format(partition, offset))

    def asyn_producer(self, data_li: list):
        """
        Send data asynchronously, then flush once for the whole batch.

        :param data_li: list of JSON-serializable payloads
        :return: None
        """
        for data in data_li:
            self.producer.send(self.topic, data)
        self.producer.flush()  # commit the batch

    def asyn_producer_callback(self, data_li: list):
        """
        Send data asynchronously with per-record success/error callbacks.

        :param data_li: list of JSON-serializable payloads
        :return: None
        """
        for data in data_li:
            self.producer.send(self.topic, data).add_callback(self.send_success).add_errback(self.send_error)
        self.producer.flush()  # commit the batch

    def send_success(self, *args, **kwargs):
        """Callback invoked when an async send succeeds."""
        print('save success')
        return

    def send_error(self, *args, **kwargs):
        """Callback invoked when an async send fails."""
        print('save error')
        return

    def close_producer(self):
        """Best-effort close of the underlying producer; errors are ignored."""
        try:
            self.producer.close()
        except Exception:  # narrowed from bare except: keep best-effort, but let SystemExit etc. through
            pass


@func_set_timeout(5)
def test_kafka(bootstrap_servers):
    """Return True if a KafkaClient can connect to *bootstrap_servers* (5s timeout via decorator)."""
    try:
        client = KafkaClient(bootstrap_servers)
        client.close()
        return True
    except Exception:  # narrowed from bare except: don't swallow KeyboardInterrupt/SystemExit
        return False

@func_set_timeout(5)
def test_kafka2(bootstrap_servers):
    """Try to build a MyKafka wrapper; return (True, wrapper) on success, (False, None) on failure."""
    try:
        wrapper = MyKafka(bootstrap_servers)
        return True, wrapper
    except Exception:  # narrowed from bare except: don't swallow KeyboardInterrupt/SystemExit
        return False, None

class MyKafka:
    """Kafka helper built on the legacy KafkaClient/send_messages API, with Avro support."""

    def __init__(self, bootstrap_servers):
        """
        Connect to the cluster and build a producer.

        :param bootstrap_servers: broker address(es)
        :raises ConnectionError: if the cluster cannot be reached
        """
        self.schema = None
        self.kafka_connection = None
        if self.test_connection(bootstrap_servers):
            self.topic_list = self.kafka_connection.topics
            self.producer = KafkaProducer(self.kafka_connection)
        else:
            raise ConnectionError(f"can not connect to bootstrap_servers: {bootstrap_servers}")

    def test_connection(self, bootstrap_servers):
        """Open a KafkaClient and store it on success; return True/False.

        NOTE(review): this method was called by __init__ but never defined in the
        original source (AttributeError at runtime); implemented here following
        the module-level test_kafka() pattern — confirm against the intended design.
        """
        try:
            self.kafka_connection = KafkaClient(bootstrap_servers)
            return True
        except Exception:
            return False

    def __del__(self):
        # Guard: kafka_connection is None (or unset) when __init__ failed before
        # connecting; the original unconditionally called close() and crashed here.
        connection = getattr(self, 'kafka_connection', None)
        if connection is not None:
            try:
                connection.close()
            except Exception:
                pass  # best-effort cleanup during interpreter shutdown

    def send_string_data(self, topic_name: str, data: str, data_method='data'):
        """
        Send newline-delimited UTF-8 strings, either from a file or a raw string.

        :param topic_name: target topic
        :param data: file path (data_method='file') or raw newline-separated text
        :param data_method: 'file' or 'data'
        """
        if data_method == 'file':
            # fix: the file handle was never closed in the original
            with open(data, encoding='utf-8') as f:
                for string_line in f:
                    self.producer.send_messages(topic_name, string_line.encode("utf-8"))
        else:
            for data_string in data.split("\n"):
                self.producer.send_messages(topic_name, data_string.encode("utf-8"))

    def send_avro_data(self, topic_name: str, data: str, schema_path: str, data_method='data'):
        """
        Avro-encode newline-separated JSON records and send them.

        :param topic_name: target topic
        :param data: raw text (data_method='data') or a file path of JSON lines
        :param schema_path: path to the .avsc Avro schema
        :param data_method: 'data' or anything else to read from file
        """
        # fix: the schema file handle was never closed in the original
        with open(schema_path) as schema_file:
            self.schema = avro.schema.parse(schema_file.read())
        num = 1
        if data_method == "data":
            for dic_data in data.split("\n"):
                self.__send_avro_data(dic_data, topic_name)
                print(num)  # progress counter
                num = num + 1
        else:
            with open(data, "r", encoding="utf-8") as json_file:
                for dic_data in json_file:
                    self.__send_avro_data(dic_data, topic_name)
                    print(num)  # progress counter
                    num = num + 1

    def __send_avro_data(self, string_data, topic_name):
        """Parse one JSON record, Avro-encode it with self.schema, and send it."""
        try:
            data = json.loads(string_data)
            writer = DatumWriter(self.schema)
            bytes_writer = io.BytesIO()
            encoder = avro.io.BinaryEncoder(bytes_writer)
            writer.write(data, encoder)
            raw_bytes = bytes_writer.getvalue()
            self.producer.send_messages(topic_name, raw_bytes)
        except Exception as errors:
            # Deliberate best-effort: a bad record is reported but does not stop the batch.
            print(str(errors))


def list_topics(bootstrap_servers):
    """Return the topics known to the cluster at *bootstrap_servers*.

    :param bootstrap_servers: broker address(es)
    :return: the client's topic collection
    """
    client = KafkaClient(bootstrap_servers)
    try:
        return client.topics
    finally:
        # fix: the original leaked the client if reading .topics raised
        client.close()


def send_data_by_data(bootstrap_servers, topic_name, data_string, schema_path):
    """Avro-encode each newline-separated JSON record in *data_string* and send it.

    :param bootstrap_servers: broker address(es)
    :param topic_name: target topic
    :param data_string: newline-separated JSON records
    :param schema_path: path to the .avsc Avro schema
    """
    # fix: the schema file handle was never closed in the original
    with open(schema_path) as schema_file:
        schema = avro.schema.parse(schema_file.read())

    kafka_client = KafkaClient(bootstrap_servers)
    try:
        producer = KafkaProducer(kafka_client)
        for num, dic_data in enumerate(data_string.split("\n"), start=1):
            try:
                data = json.loads(dic_data)
                writer = DatumWriter(schema)
                bytes_writer = io.BytesIO()
                encoder = avro.io.BinaryEncoder(bytes_writer)
                writer.write(data, encoder)
                raw_bytes = bytes_writer.getvalue()
                producer.send_messages(topic_name, raw_bytes)
            except Exception as errors:
                # Best-effort: report the bad record and continue with the rest.
                print(str(errors))
            print(num)  # progress counter
    finally:
        # fix: the original leaked the client if any line above raised
        kafka_client.close()


def send_data_by_file(bootstrap_servers, topic_name, json_path, schema_path):
    """Avro-encode each JSON line of the file at *json_path* and send it.

    :param bootstrap_servers: broker address(es)
    :param topic_name: target topic
    :param json_path: path to a UTF-8 file with one JSON record per line
    :param schema_path: path to the .avsc Avro schema
    """
    # fix: the schema file handle was never closed in the original
    with open(schema_path) as schema_file:
        schema = avro.schema.parse(schema_file.read())

    kafka_client = KafkaClient(bootstrap_servers)
    try:
        producer = KafkaProducer(kafka_client)
        # fix: 'with' guarantees the data file is closed even on error
        with open(json_path, "r", encoding="utf-8") as json_file:
            for num, dic_data in enumerate(json_file, start=1):
                try:
                    data = json.loads(dic_data)
                    writer = DatumWriter(schema)
                    bytes_writer = io.BytesIO()
                    encoder = avro.io.BinaryEncoder(bytes_writer)
                    writer.write(data, encoder)
                    raw_bytes = bytes_writer.getvalue()
                    producer.send_messages(topic_name, raw_bytes)
                except Exception as errors:
                    # Best-effort: report the bad record and continue with the rest.
                    print(str(errors))
                print(num)  # progress counter
    finally:
        # fix: the original leaked the client if any line above raised
        kafka_client.close()


def send_string_data(bootstrap_servers, topic_name, data, data_method='data'):
    """Send newline-delimited UTF-8 strings, either from a file or a raw string.

    :param bootstrap_servers: broker address(es)
    :param topic_name: target topic
    :param data: file path (data_method='file') or raw newline-separated text
    :param data_method: 'file' or 'data'
    """
    print("发送string类型的数据")
    kafka_client = KafkaClient(bootstrap_servers)
    try:
        producer = KafkaProducer(kafka_client)
        if data_method == 'file':
            # fix: the file handle was never closed in the original
            with open(data, encoding='utf-8') as f:
                for string_line in f:
                    producer.send_messages(topic_name, string_line.encode("utf-8"))
        else:
            for data_string in data.split("\n"):
                producer.send_messages(topic_name, data_string.encode("utf-8"))
    finally:
        # fix: the original leaked the client if a send raised
        kafka_client.close()


if __name__ == "__main__":
    bootstrap_servers = "192.168.80.103:6667"

    # Example (avro): read a data file and push it through send_data_by_data.
    # topic_name = "test"
    # send_data = "E:/DEYE-6.2/DEYE-6.2.3/小工具/send_lot_data/data/email01.txt"
    # schema_path = "E:/DEYE-6.2/DEYE-6.2.3/小工具/send_lot_data/schema/dwd/dwd-pr-email.avsc"
    # f = open(send_data,encoding="utf-8")
    # data_string = f.read()
    # f.close()
    # send_data_by_data(bootstrap_servers,topic_name,data_string,schema_path)

    # bootstrap_servers = "172.16.80.10:6667"
    connection_ok = test_kafka(bootstrap_servers)
    print(connection_ok)

    print(list_topics(bootstrap_servers))

    # send_string_data(bootstrap_servers,"test")
