# -*- coding: utf-8 -*-
# __author__ = 'Yuanjiang Huang'
# yuanjiang.huang@socialcredits.cn


import sys, os
from pyspark import SparkConf,SparkContext
import pymongo_spark
pymongo_spark.activate()

# Point Spark at the local 1.5.2 installation and expose its Python bindings.
# NOTE(review): these lines run AFTER `from pyspark import ...` (top of file),
# so they cannot affect that import — presumably pyspark is already importable
# in this environment; confirm whether this setup is still needed here.
os.environ['SPARK_HOME'] = "/home/scdev/spark-1.5.2/"
# Append pyspark  to Python Path
# sys.path.append("/home/openkai/social-credits/spark/spark-1.5.1/python/")
sys.path.append("/home/scdev/spark-1.5.2/python/")

# Directory containing this script, normalized to carry a trailing slash when
# non-empty (it is the empty string when the script is run from its own dir).
CURRENT_PATH = os.path.dirname(__file__)
if CURRENT_PATH:
    CURRENT_PATH = CURRENT_PATH + '/'
# References:
# https://github.com/mongodb/mongo-hadoop/blob/master/spark/src/main/python/README.rst
# https://github.com/mongodb/mongo-hadoop
# Known mongo-hadoop bugs affecting this integration:
# https://jira.mongodb.org/browse/HADOOP-244
# https://jira.mongodb.org/browse/HADOOP-243

def test():
	#ref. http://www.clarenceho.net/2015/12/apache-spark-with-mongodb-using-pymongo.html
	master_url = 'local[16]'
	# master_url = 'spark://sc-data-server-1:7077'
	conf = SparkConf()\
		.setMaster(master_url)\
		.setAppName('Demo Mongo')\
		# .set('spark.executor.extraClassPath', '/home/openkai/hadoop/share/hadoop/mapreduce/mongo-hadoop-spark-1.5.0-SNAPSHOT.jar')
	sc = SparkContext(conf=conf)
	# mongo_rdd = sc.mongoRDD('mongodb://192.168.31.121:27017/sc_web.apikey')
	mongo_rdd = sc.mongoRDD('mongodb://192.168.31.114:27017/crawler_company_all.companyinfo')
	rdd = mongo_rdd.map(lambda x:x)
	# print mongo_rdd.first()
	# rdd = mongo_rdd.map(lambda x : x[u'status'])
	# rdd2 = rdd.filter(lambda x:x == u'ACTIVE')
	print '*'*100
	print dir(mongo_rdd)
	print rdd.take(10)

	# for item in rdd.take(10):
	# 	print item.encode('utf-8')
	# # print (len(mongo_rdd.collect()))
	# cnt = 0
	# for item in mongo_rdd:
	# 	print cnt
	# 	cnt += 1
	print '*'*100

def ohlc(grouping):
	"""Collapse one group of 1-minute bars into a single OHLC bar.

	grouping: a (groupKey, bars) pair as produced by RDD.groupBy, where
	  groupKey is (symbol, bucket) and bars is an iterable of bar dicts
	  with keys "Timestamp", "Open", "High", "Low", "Close" — assumed
	  already sorted by Timestamp (the caller sorts before grouping).

	Returns a (None, doc) pair: the None key lets MongoOutputFormat
	assign its own ObjectId; doc is the aggregated OHLC document.

	Raises ValueError if the group contains no bars (the previous
	version died with an opaque UnboundLocalError in that case).
	"""
	group_key, bars = grouping
	# Materialize once: groupBy yields a ResultIterable, and we need both
	# the first and the last element as well as a full min/max pass.
	bars = list(bars)
	if not bars:
		raise ValueError("empty bar group for key %r" % (group_key,))
	# Open/Timestamp come from the first bar, Close from the last;
	# High/Low are the extremes over the whole group. max()/min() replace
	# the old sys.maxint sentinels, which were py2-only and wrong for
	# values beyond the int range.
	output_doc = {
		"Symbol": group_key[0],
		"Timestamp": bars[0]["Timestamp"],
		"Open": bars[0]["Open"],
		"High": max(bar["High"] for bar in bars),
		"Low": min(bar["Low"] for bar in bars),
		"Close": bars[-1]["Close"],
	}
	return (None, output_doc)

def test2():
	"""Aggregate 1-minute bars from MongoDB into 5-minute OHLC bars.

	Reads minute bars through the mongo-hadoop input format, groups them
	into per-symbol 5-minute buckets, reduces each bucket with ohlc(),
	and writes the result back to MongoDB via the Hadoop output format.
	Side effects only; returns None.
	"""
	import calendar, time, math

	sc = SparkContext(conf=SparkConf().setMaster('local').setAppName('Demo'))

	# Hadoop-format configuration for the source collection.
	hadoop_conf = {"mongo.input.uri": "mongodb://192.168.31.121:27017/unit_sc_p2p.minbars"}
	input_format = "com.mongodb.hadoop.MongoInputFormat"
	# These key/value classes are known to work; others might as well.
	key_class = "org.apache.hadoop.io.Text"
	value_class = "org.apache.hadoop.io.MapWritable"

	# Pull the raw 1-minute bars out of MongoDB as a Spark RDD.
	raw_bars = sc.newAPIHadoopRDD(input_format, key_class, value_class, None, None, hadoop_conf)

	# The same dict doubles as the output configuration.
	hadoop_conf["mongo.output.uri"] = "mongodb://192.168.31.121:27017/unit_sc_p2p.fiveminutebars"
	output_format = "com.mongodb.hadoop.MongoOutputFormat"

	# Strip the Hadoop record keys (extra metadata); keep just the bar docs.
	minute_bars = raw_bars.values()

	time_format = '%Y-%m-%d %H:%M'

	def bucket_key(doc):
		# (symbol, index of the document's 5-minute bucket since the epoch)
		epoch_seconds = calendar.timegm(time.strptime(doc["Timestamp"], time_format))
		return (doc["Symbol"], math.floor(epoch_seconds / (5 * 60)))

	# Sort chronologically, then bucket each symbol's bars into 5-minute groups.
	grouped_bars = minute_bars.sortBy(lambda doc: str(doc["Timestamp"])).groupBy(bucket_key)

	# Reduce every (key, group) pair to one OHLC document.
	five_minute_bars = grouped_bars.map(ohlc)

	# The file path is a placeholder required by the API; the real target is
	# the mongo.output.uri in hadoop_conf.
	five_minute_bars.saveAsNewAPIHadoopFile(
		'file:///' + CURRENT_PATH + "/placeholder",
		output_format, None, None, None, None, hadoop_conf)

# Script entry point: run the pymongo_spark smoke test.
if __name__ == "__main__":
	test()
	# sys.exit(0)
