import json

from kafka import KafkaProducer
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from app.models import DbiShop
from config import POSTGREPSQL_URL
from manage import app

# Module-level DB connectivity shared by selectDb().
# NOTE(review): POSTGREPSQL_URL is imported as a value from config but used
# here as a *key* into app.config — verify it is the config key name (a str)
# and not the URL itself; if it is the URL, this lookup is a bug.
engine = create_engine(app.config[POSTGREPSQL_URL])
# One session for the whole module; selectDb() closes it after each use,
# and SQLAlchemy transparently re-opens a closed session on next access.
session_factory = sessionmaker(bind=engine)
session = session_factory()


def selectDb(mapperObject):
    """Fetch every row mapped by *mapperObject* and return them serialized.

    Each row is converted to a plain dict via column_dict(); the whole list
    is returned as its str() representation (a Python repr, NOT valid JSON —
    a downstream json.dumps() will wrap it as a single JSON string).

    :param mapperObject: SQLAlchemy mapped class to query (e.g. DbiShop).
    :return: str -- repr of a list of row dicts.
    """
    try:
        rows = session.query(mapperObject).all()
        result_list = [column_dict(row) for row in rows]
        return str(result_list)
    finally:
        # Read-only query: no commit needed. Close in finally so the
        # connection is released even when the query raises; the shared
        # module-level session re-opens automatically on next use.
        session.close()


def column_dict(self):
    """Return the object's instance attributes as a plain dict.

    Strips SQLAlchemy's internal ``_sa_instance_state`` entry so the result
    contains only mapped column values.

    :param self: any object with a ``__dict__`` (typically an ORM instance).
    :return: dict of attribute name -> value.
    """
    model_dict = dict(self.__dict__)
    # pop() with a default tolerates plain (non-ORM) objects that lack the
    # key; the original `del` raised KeyError for those.
    model_dict.pop('_sa_instance_state', None)
    return model_dict


if __name__ == '__main__':
    # Pull all DbiShop rows as a single repr string (see selectDb).
    data = selectDb(DbiShop)

    kafka_addr = "192.168.12.33:19092,192.168.12.203:29092,192.168.12.204:9092"
    producer = KafkaProducer(
        bootstrap_servers=kafka_addr,
        # NOTE(review): `data` is already a str, so this serializer emits a
        # JSON *string* value, not a JSON array — confirm consumers expect
        # that before changing selectDb's return type.
        value_serializer=lambda m: json.dumps(m).encode("utf-8"),
    )
    try:
        producer.send('dbi_shop', data)
        # Block until the broker acknowledges the buffered message.
        producer.flush()
    finally:
        # Release the network connections; the original leaked the producer.
        producer.close()
