from pyspark import SparkContext, SparkConf
import pymongo
from pyspark.sql.session import SparkSession
import re

# --- Spark + MongoDB setup -------------------------------------------------
# Local Spark context/session used to stage the 'director' collection.
conf = SparkConf().setMaster("local").setAppName("DataTransform-director")
sc = SparkContext(conf=conf)
spark = SparkSession.builder.appName('DataTransform-director').getOrCreate()

# Source collection: movie.director on a remote MongoDB instance.
# NOTE(review): unauthenticated connection to a hard-coded public IP —
# consider moving the URI into configuration / environment variables.
conn = pymongo.MongoClient("mongodb://47.93.220.108:27017")['movie']['director']

# Materialize all documents, projecting away Mongo's '_id' (ObjectId is not
# a type Spark can infer a schema for). list(cursor) drains the cursor
# directly instead of a manual append loop.
ll = list(conn.find({}, {'_id': 0}))
rdd = sc.parallelize(ll)
df = spark.createDataFrame(rdd)

from elasticsearch import Elasticsearch

# --- Elasticsearch sink ----------------------------------------------------
# NOTE(review): hard-coded ES host — consider configuration.
# Port is an int, matching the elasticsearch-py client signature.
es = Elasticsearch(hosts='106.13.117.37', port=9200)

# Index each transformed row into the 'director' index.
# NOTE(review): one HTTP round-trip per document; for large collections
# elasticsearch.helpers.bulk would batch these — verify volume before changing.
for row in df.collect():
    data = row.asDict()
    # Mirror 'name' under 'key-name' so both fields are present in the doc.
    data['key-name'] = data['name']
    es.index('director', body=data)


