# -*- coding: utf-8 -*-
from kafka import KafkaProducer, KafkaConsumer
from kafka.errors import KafkaError
import traceback
import json
import csv
import os
class Kafka_Transe():
    """Demo helper around kafka-python: stream rows from a CSV file into a
    Kafka topic as JSON, and read JSON messages back out.

    Broker location comes from the ``KAFAKA_HOST`` / ``KAFAKA_PORT``
    environment variables (spelling kept as-is for backward compatibility
    with deployments that already export these names).
    """

    def __init__(self):
        # Required environment variables; a missing one raises KeyError
        # here, early, rather than failing later inside the kafka client.
        self.KAFAKA_HOST = os.environ['KAFAKA_HOST']        # broker host
        self.KAFAKA_PORT = int(os.environ['KAFAKA_PORT'])   # broker port
        self.KAFAKA_TOPIC = "test"   # topic produced to / consumed from
        self.GROUP_ID = 'test2'      # consumer group id (not currently passed to the consumer)

    def producer_demo(self):
        """Read up to 300 rows from ``data/002.csv`` and publish each row
        as a JSON-serialized dict to ``self.KAFAKA_TOPIC``.

        Fixes over the original sketch:
        - the CSV file is closed via a context manager (no fd leak),
        - the producer is flushed and closed so buffered records are sent,
        - an empty CSV no longer raises NameError on an unbound ``future``,
        - a failed send is printed instead of being silently discarded.
        """
        bootstrap = '{kafka_host}:{kafka_port}'.format(
            kafka_host=self.KAFAKA_HOST, kafka_port=self.KAFAKA_PORT)
        producer = KafkaProducer(
            bootstrap_servers=bootstrap,
            # Row dicts are serialized as UTF-8 JSON on the wire.
            value_serializer=lambda m: json.dumps(m).encode('utf-8'))
        future = None
        try:
            with open('data/002.csv', 'r') as csvfile:
                # The first line of the file holds the column names.
                fieldnames = csvfile.readline().strip('\n').split(',')
                reader = csv.DictReader(csvfile, fieldnames)
                for i, row in enumerate(reader):
                    if i >= 300:  # cap the demo at 300 messages
                        break
                    future = producer.send(self.KAFAKA_TOPIC, value=row)
                    print("send " + str(row))
            if future is not None:
                try:
                    # Block until the last send is acknowledged (or times out).
                    future.get(timeout=10)
                except KafkaError:
                    # Surface the failure; the original discarded the
                    # formatted traceback without using it.
                    print(traceback.format_exc())
        finally:
            # Ensure buffered records are transmitted and the client is
            # shut down even if reading/sending raised.
            producer.flush()
            producer.close()

    def consumer_demo(self):
        """Consume JSON messages from ``self.KAFAKA_TOPIC`` until the
        consumer idles for 8 seconds, returning the decoded values.

        Returns:
            list: the deserialized (``json.loads``) message values, in
            arrival order.
        """
        consumer = KafkaConsumer(
            self.KAFAKA_TOPIC,
            bootstrap_servers='{kafka_host}:{kafka_port}'.format(
                kafka_host=self.KAFAKA_HOST, kafka_port=self.KAFAKA_PORT),
            # Stop iterating after 8s without a new message, so the demo
            # terminates instead of blocking forever.
            consumer_timeout_ms=8000,
            value_deserializer=lambda m: json.loads(m))
        jsonlist = []
        try:
            for message in consumer:
                value = message.value
                print(type(value))
                print(value)
                jsonlist.append(value)
        finally:
            # Close the connection even if iteration raised (the original
            # leaked the consumer).
            consumer.close()
        return jsonlist








