from kafka import KafkaProducer
from datetime import datetime
import json
import time

# Input: CSV dump of live-room events (one event per line).
live_room_msg_file = open("query_result.csv")
# Outputs: one JSON document per input line, and (optionally, currently
# disabled in the loop below) one SQL INSERT per line.
# NOTE(review): module-level handles, closed at the bottom of the script;
# `with` blocks would be safer but would require restructuring the script.
gis_liveing_room_file = open("gis_liveing_room.json", "w")
gis_liveing_room_sql_file = open("gis_liveing_room.sql", "w")


def cvs_to_json(cvs):
    """Convert one CSV line of live-room event data to a JSON string.

    Expected field order: roomid, videorunning, pcwaptype,
    timestamp (epoch seconds), eventname, userid.
    """
    # rstrip the line terminator so the last field (userid) does not
    # carry a trailing "\n" into the JSON output.
    strs = cvs.rstrip("\r\n").split(",")
    d = {}
    d['roomid'] = strs[0]
    d['videorunning'] = strs[1]
    d['pcwaptype'] = strs[2]
    # int() instead of Python-2-only long() — identical behavior on py2,
    # and no NameError on py3 (py3 ints are arbitrary-precision anyway).
    d['timestamp'] = int(strs[3])
    d['eventname'] = strs[4]
    d['userid'] = strs[5]
    return json.dumps(d)

# Column order in the table puts `timestamp` last, so the placeholder order
# intentionally differs from the CSV field order.
tml = "insert into `gis_web_liveing_room` (`roomid`,`videorunning`,`pcwaptype`,`eventname`,`userid`,`timestamp`) values ('{0}','{1}','{2}','{3}','{4}','{5}');"
def cvs_to_sql(cvs):
    """Convert one CSV line to an INSERT statement for gis_web_liveing_room.

    The epoch-seconds timestamp is rendered as a local-time
    "YYYY-MM-DD HH:MM:SS" string.

    NOTE(review): values are spliced in with str.format and are not escaped —
    acceptable for a trusted dump, but unsafe if any field can contain a quote.
    """
    # Strip the line terminator so userid (last field) is clean.
    strs = cvs.rstrip("\r\n").split(",")
    d = {}
    d['roomid'] = strs[0]
    d['videorunning'] = strs[1]
    d['pcwaptype'] = strs[2]
    # int() replaces Python-2-only long(); fromtimestamp() uses the local tz.
    d['timestamp'] = datetime.fromtimestamp(int(strs[3])).strftime("%Y-%m-%d %H:%M:%S")
    d['eventname'] = strs[4]
    d['userid'] = strs[5]
    return tml.format(d['roomid'], d['videorunning'], d['pcwaptype'],
                      d['eventname'], d['userid'], d['timestamp'])


# Kafka publishing is currently disabled; converted records are only
# written to the local JSON file.
# producer = KafkaProducer(bootstrap_servers='192.168.213.129:9092')
for csv_line in live_room_msg_file:
    json_line = cvs_to_json(csv_line)
    gis_liveing_room_file.write(json_line + "\n")
    # gis_liveing_room_sql_file.write(cvs_to_sql(csv_line) + "\n")
# producer.close()
gis_liveing_room_file.close()
gis_liveing_room_sql_file.close()
live_room_msg_file.close()