from __future__ import with_statement

import logging
import pickle

from google.appengine.api import files
from google.appengine.ext import db, webapp
from google.appengine.ext.db import Key

from fantasm import fsm
from fantasm.action import FSMAction, DatastoreContinuationFSMAction
from logAction import LogAction, WebUser

class FilterResult(db.Model):
    # One log entry kept by the FilterOnTimespan machine (a LogAction row
    # whose timespan exceeded the filter threshold).
    action = db.IntegerProperty() # 0:click, 1:read, 2:change
    sourceIp = db.IntegerProperty()

class ResultBlobKey(db.Model):
    # Stringified blobstore key of a finalized result file, stored so the
    # pickled results written by the fan-in states can be located later.
    blobKey = db.StringProperty()

###############################################
# Start of State Machine 
# FilterTimespan
###############################################
class TimespanFilter(DatastoreContinuationFSMAction):
    """Continuation state: scans LogAction entities and keeps those whose
    timespan exceeds the threshold, staging matches in the context.

    Fixes vs. original: consistent 4-space indentation (the original mixed
    tabs and spaces), no shadowing of the ``filter`` builtin, and the
    hard-coded 10.0 threshold is now an overridable class attribute.
    """

    # Threshold (seconds); rows with a larger timespan are kept.
    TIMESPAN_THRESHOLD = 10.0

    def getQuery(self, context, obj):
        """Return the datastore query this continuation iterates over."""
        return LogAction.all()

    def execute(self, context, obj):
        """Filter the current batch of results.

        Returns 'dataready' after staging the (possibly empty) list of
        matches in context['filter'], or None when the batch is empty.
        """
        if not obj['results']:
            return None
        entities = [
            FilterResult(action=rs.action, sourceIp=rs.sourceIp)
            for rs in obj['results']
            if rs.timespan > self.TIMESPAN_THRESHOLD
        ]
        # Fantasm contexts carry simple values, so serialize the entities.
        context['filter'] = pickle.dumps(entities)
        return 'dataready'


class WriteFilterRstToDB(FSMAction):
    """Fan-in state: merges the pickled FilterResult lists from every
    fan-out context, writes the merged list to a blobstore file, and
    records the resulting blob key in the datastore.

    Fixes vs. original: consistent 4-space indentation (the original mixed
    tabs and spaces in the same suite).
    """

    def execute(self, list_of_contexts, obj):
        # Merge the per-context partial results into a single list.
        # NOTE: pickle.loads is only safe here because the payload is our
        # own context data; never unpickle untrusted input.
        rst = []
        for cxt in list_of_contexts:
            rst.extend(pickle.loads(str(cxt['filter'])))

        # Write the merged results to a new blobstore file.
        file_name = files.blobstore.create(mime_type='application/octet-stream')
        with files.open(file_name, 'a') as f:
            f.write(pickle.dumps(rst))
        # Finalize the file before attempting to resolve its blob key.
        files.finalize(file_name)

        # Persist the blob key so readers can find the result file.
        blob_key = files.blobstore.get_blob_key(file_name)
        db.put(ResultBlobKey(blobKey=str(blob_key)))
	

# (Removed commented-out code: an earlier TimespanFilter variant that pushed
# the timespan filter into the datastore query and wrote FilterResult rows
# directly with batched db.put calls instead of fanning in via pickle.)
###############################################
# End of State Machine 
# FilterTimespan
###############################################

class TotalTimePerIp(db.Model):
    # Aggregate row: total LogAction timespan accumulated per source IP.
    # WriteIpTimeToDB keys these entities by key_name=str(sourceIp).
    sourceIp = db.IntegerProperty()
    timespan = db.FloatProperty()

###############################################
# Start of State Machine 
# AggregateOnSourceIp
###############################################

class GroupSourIp(DatastoreContinuationFSMAction):
    """Continuation state: sums LogAction timespans per source IP for the
    current batch and hands the partial aggregate to the fan-in state.

    Fixes vs. original: 4-space indentation instead of tabs, and removal
    of an unreachable commented-out transaction block that sat after the
    ``return`` statement.
    """

    def getQuery(self, context, obj):
        """Return the datastore query this continuation iterates over."""
        return LogAction.all()

    def execute(self, context, obj):
        """Aggregate this batch of results.

        Returns 'ready' after staging the partial {ip: total_time} dict in
        context['ip_time'], or None when the batch is empty.
        """
        if not obj['results']:
            return None
        ip_time_dict = {}
        for entity in obj['results']:
            ip = int(entity.sourceIp)
            ip_time_dict[ip] = ip_time_dict.get(ip, 0) + entity.timespan
        # Fantasm contexts carry simple values, so serialize the dict.
        context['ip_time'] = pickle.dumps(ip_time_dict)
        logging.info('---------- step 1 : %s', ip_time_dict)
        return 'ready'

class WriteIpTimeToDB(FSMAction):
    """Fan-in state: merges the per-context partial {ip: time} aggregates
    and transactionally accumulates them into TotalTimePerIp entities.

    Fixes vs. original: 4-space indentation instead of tabs, removal of a
    dead commented-out batched-put variant, and the duplicated
    ``totalTime.put()`` hoisted out of the if/else.
    """

    def execute(self, list_of_contexts, obj):
        logging.info('--------- fan in size: %s', len(list_of_contexts))

        # Merge every context's partial aggregate into one dict.
        # NOTE: pickle.loads on our own context payload only.
        time_dict = {}
        for cxt in list_of_contexts:
            one_dict = pickle.loads(str(cxt['ip_time']))
            logging.info('------------ step 2: %s', one_dict)
            for ip, time in one_dict.items():
                time_dict[ip] = time_dict.get(ip, 0) + time

        def txn(ip, time):
            # Read-modify-write of a single TotalTimePerIp row; keying by
            # str(ip) makes concurrent updates to the same IP serialize.
            totalTime = TotalTimePerIp.get_by_key_name(str(ip))
            if not totalTime:
                totalTime = TotalTimePerIp(key_name=str(ip),
                                           sourceIp=ip, timespan=time)
            else:
                totalTime.timespan += float(time)
            totalTime.put()

        for ip, time in time_dict.items():
            db.run_in_transaction(txn, ip, time)
	
###############################################
# End of State Machine 
# AggregateOnSourceIp
###############################################

class UserAction(db.Model):
    # Joined row produced by AllUser: a LogAction's timespan combined with
    # the city and sex of the WebUser who performed it.
    city = db.StringProperty()
    sex = db.IntegerProperty() # 0:female, 1:male
    timespan = db.FloatProperty()


class AllUser(DatastoreContinuationFSMAction):
    """Continuation state: joins each LogAction batch with its referenced
    WebUser, writes the joined UserAction rows to a blobstore file, and
    passes the blob key downstream via the context.

    Fixes vs. original: consistent 4-space indentation (the original mixed
    tabs and spaces) and removal of a dead commented-out logging block.
    """

    def getQuery(self, context, obj):
        """Return the datastore query this continuation iterates over."""
        return LogAction.all()

    def execute(self, context, obj):
        """Join the batch with users and stage the result in blobstore.

        Returns 'ready' after setting context['blob_key'], or None when
        the batch is empty.
        """
        if not obj['results']:
            return None
        # Collect the referenced WebUser keys without dereferencing each
        # ReferenceProperty one by one: a single batched, deduplicated get.
        user_keys = [LogAction.user.get_value_for_datastore(log)
                     for log in obj['results']]
        users_dict = dict((u.key(), u) for u in db.get(set(user_keys)))
        results = [UserAction(city=users_dict[user_key].city,
                              sex=users_dict[user_key].sex,
                              timespan=aLog.timespan)
                   for aLog, user_key in zip(obj['results'], user_keys)]

        # Stage the joined rows in a blobstore file; the context only
        # carries the (string) blob key onward.
        file_name = files.blobstore.create(mime_type='application/octet-stream')
        with files.open(file_name, 'a') as f:
            f.write(pickle.dumps(results))
        # Finalize the file before attempting to resolve its blob key.
        files.finalize(file_name)
        context['blob_key'] = str(files.blobstore.get_blob_key(file_name))
        return 'ready'

class StoreBlobKey(FSMAction):
    """Fan-in state: persists one ResultBlobKey row per fan-out context so
    the joined result files written by AllUser can be located later.

    Fixes vs. original: 4-space indentation instead of tabs and removal of
    a dead commented-out logging block.
    """

    def execute(self, list_of_contexts, obj):
        # One batched put instead of a write per context.
        db.put([ResultBlobKey(blobKey=cxt['blob_key'])
                for cxt in list_of_contexts])

# (Removed commented-out code: an earlier AllUser/ActionPerUser join
# implementation that spawned one datastore continuation per user instead
# of resolving all user references with a single batched db.get.)

class FTLogFilter(webapp.RequestHandler):
    """POST handler: kicks off the FilterOnTimespan state machine and
    redirects back to the landing page.

    Fixes vs. original: the method body mixed a tab-indented line with a
    space+tab-indented line; now uniformly 4-space indented.
    """

    def post(self):
        fsm.startStateMachine('FilterOnTimespan', [{}])
        self.redirect('/')

class FTLogAggr(webapp.RequestHandler):
    """POST handler: kicks off the AggregateOnSourceIp state machine and
    redirects back to the landing page.

    Fixes vs. original: the method body mixed space- and tab-indented
    lines; now uniformly 4-space indented.
    """

    def post(self):
        fsm.startStateMachine('AggregateOnSourceIp', [{}])
        self.redirect('/')

class FTLogJoin(webapp.RequestHandler):
    """POST handler: kicks off the JoinUserAction state machine and
    redirects back to the landing page.

    Fixes vs. original: the method body mixed space- and tab-indented
    lines; now uniformly 4-space indented.
    """

    def post(self):
        fsm.startStateMachine('JoinUserAction', [{}])
        self.redirect('/')
