from elasticsearch import Elasticsearch
from pyspark.streaming import StreamingContext
from pyspark.sql.session import SparkSession
from pyspark.sql import SQLContext
from pyspark.streaming.kafka import KafkaUtils
import os
import json

# NOTE(review): JAVA_HOME conventionally points at the JDK root, not its
# bin/ directory — confirm this path is what the Spark launcher expects.
os.environ['JAVA_HOME'] = "/usr/local/java/bin"
# Module-level Elasticsearch client shared by the helper functions below.
es = Elasticsearch(['192.168.0.221:9200'])

# Query data from Elasticsearch and dump the raw response.
def search_data(es):
    """Search the "my_store" index (doc type "products") and print both the
    whole response and just the list of matching hits."""
    response = es.search(index="my_store", doc_type="products")
    for chunk in (response, response['hits']['hits']):
        print(chunk)

# Parse fixed-width GPS records and index one ES document per record.
def create_send_data(lines, es_client=None):
    """Index each fixed-width record in *lines* into the "user_point" index.

    Args:
        lines: iterable of fixed-width strings laid out as
            timestamp[0:10] u_id[10:14] lat[14:23] lng[23:32]
            gps_timestamp[32:42] angle[42:45].
        es_client: optional Elasticsearch-like client (must expose ``index``);
            defaults to the module-level ``es`` connection so existing callers
            (``rdd.foreachPartition(create_send_data)``) are unchanged.
    """
    client = es_client if es_client is not None else es
    # Start at 2 to preserve the original numbering (the old counter was
    # incremented before first use, so the first document id was "002").
    for seq, rec in enumerate(lines, start=2):
        doc = {
            'timestamp': rec[0:10],
            'u_id': rec[10:14],
            'lat': rec[14:23],
            'lng': rec[23:32],
            'gps_timestamp': rec[32:42],
            'angle': rec[42:45],
        }
        # zfill pads without truncating: the old '"000"+str(a)' then '[-3:]'
        # kept only the last 3 digits, so ids wrapped and silently overwrote
        # earlier documents once a partition exceeded 999 records.
        client.index(index="user_point", doc_type="test",
                     id=str(seq).zfill(3), body=doc)

# Wire up the Kafka direct stream and return a DStream of message payloads.
def Dstream_opt(ssc):
    """Create a direct Kafka connection to topic "test0210" and return a
    DStream containing only the message values (records arrive as
    (key, value) pairs with a None key)."""
    broker_conf = {"metadata.broker.list": "192.168.0.221:9092"}
    ks = KafkaUtils.createDirectStream(ssc, ["test0210"], broker_conf)
    print("================kafka连接成功=====================")
    ks.pprint()
    # Drop the (always-None) key and keep just the payload of each record.
    return ks.map(lambda kv: kv[1])


if __name__ == '__main__':
    # The ES client is instantiated at module level (see `es` above).
    print("===========================实例化es======================")

    # Build the Spark driver: a local 2-core SparkSession and a streaming
    # context with a 3-second batch interval.
    print("========================创建spark=========================")
    spark = SparkSession.builder.master("local[2]").getOrCreate()
    sc = spark.sparkContext
    ssc = StreamingContext(sc, 3)
    # Dropped the unused `sqlContext = SQLContext(sc)` local: nothing read
    # it, and SQLContext is superseded by the SparkSession built above.

    # Obtain the DStream of Kafka payloads and ship every partition's
    # records to Elasticsearch.
    lines = Dstream_opt(ssc)
    lines.pprint()
    lines.foreachRDD(lambda rdd: rdd.foreachPartition(create_send_data))

    ssc.start()
    # Block until the streaming computation is terminated.
    ssc.awaitTermination()

