import smap.archiver.client
import time
import scipy.interpolate
#from numpy import *
import numpy as np
import csv
import calendar, datetime
from operator import itemgetter
from dateutil import tz
import time
import datetime

import icalendar
import urllib2
import urllib
import re

#a = array([[ 0,0 ],[ 46.2,66.3],[ 32.1,8.4],[ 16.7,4.8], [5.1,20.5]])
#print a
#b = array([[ 0.24],[ 0.23]])
#print b
#c = dot(a,b)
#print c
#print float(1)/2
#input stream ids
#key = 'WE4iJWG7k575AluJ9RJyAZs25UO72Xu0b4RA'
#c = smap.archiver.client.SmapClient(base='http://new.openbms.org/backend')
# sMAP archiver client pointed at the new.openbms.org backend; the two key
# strings grant read access to the streams queried further down this script.
# NOTE(review): API keys are hard-coded in source — consider loading them
# from an environment variable or config file before sharing this file.
_ARCHIVER_BASE = 'http://new.openbms.org/backend'
_API_KEYS = [
    'WE4iJWG7k575AluJ9RJyAZs25UO72Xu0b4RA',
    'SA2nYWuHrJxmPNK96pdLKhnSSYQSPdALkvnA',
]
c = smap.archiver.client.SmapClient(base=_ARCHIVER_BASE, key=_API_KEYS)
#smapData = c.data_uuid([u'109195dd-07d1-5b03-aa18-2551d4bd8e31', u'91869983-5a50-5d88-bc5e-38b0caa90748', u'6a18f8ba-42fd-547d-9d38-8b3bbebd9242', u'cbbc33a4-27f8-5241-a905-59c9dfa1ecfb'],  1317484980, 1317485100)
#print smapData[0][:,0]
#uuids = ['950b356e-df32-570a-9c78-b3ebb19d6f1a']
#uuids = ["f432d4b4-a897-5a54-bdb3-a107e5ea14d9","fe6b60de-7167-5218-9644-c06746986efb","285b111c-aae2-518c-b61e-f2120e32c4aa","8d7c9538-bfad-5546-88e6-7a88774bfe97"]
#queryS = 'apply window(first, field=\"minute\", width=15) to data in  (1340409600000,1343151480000) where uuid = '
        #to data in (' + str(post_data['start_date']) + ',' + str(post_data['end_date']) + ')
#for i in xrange(len(uuids)):
#    if i == 0:
#        queryS += '\"' + uuids[i] + '\"'
#    else:
#        queryS += ' or uuid = \"' + uuids[i] + '\"'
        #data = c.query('apply window(first, field=\"minute\", width=15) to data in (\"4/17/2012\", \"4/19/2012\") where uuid = \"f432d4b4-a897-5a54-bdb3-a107e5ea14d9\" or uuid=\"fe6b60de-7167-5218-9644-c06746986efb\"')
#data = c.query('select data in (\"1/30/2013\",\"2/10/2013\") where Path = \"/ming-test0/sensor5\"')
#data = c.query('apply window(mean, field="second", width=900) to data in (1352188800000,353398400000) limit 1000000000 where uuid = "ed1ee42f-cc16-56b5-902a-ba8738ade681" or uuid = "61ea26b0-5fb3-50eb-9e89-df9b7cd60ee4"')
data = c.query('apply mean < <= 110 to data in (1349776680000,1349780280000) limit 10000000 where uuid = "98768115-f241-5e9b-930f-603f44d42286"')
print data
#print datetime.datetime.fromtimestamp(data[0]['Readings'][0][0]/1000)
#top_data = c.query('apply window(mean, field=\'minute\',width=60) to data in (1348642800000,1349334000000) limit -1 where uuid = \'8fffb16c-bedb-502b-bb90-5f8f1c88ca62\'')
#dataGlb = np.array(top_data[0]['Readings'])
#dataTop = []
#for i in xrange(len(dataGlb)):
    #top = (globetemp(FtoC(dataDbt[i][1]), 0.15, FtoC(dataGlb[i][1]), 0.038, 0.95) + dataDbt) / 2
#    dataTop.append([dataGlb[i][0], dataGlb[i][1]])
#print np.array(dataTop)
#top_data = array(top_data[0]['Readings'])
#run average day
#start_hour = datetime.fromtimestamp(top_data[0][0]/1000).hour
#end_hour = (datetime.fromtimestamp(top_data[0][0]/1000) + timedelta(hours=23)).hour
#average_day = []
#day_data = array(0)
#day_data = [[] for i in range(24)]
#day_data[0].append(2)
#day_data[1].append(5)
#print day_data

#day_count = 0
#errors = []
#for i in xrange(0,len(top_data),24):
##    try:
#        if datetime.fromtimestamp(top_data[i][0]/1000).hour == start_hour and datetime.fromtimestamp(top_data[i+23][0]/1000).hour == end_hour: #make sure full day
##            day_data = day_data + top_data[i:i+24]
#            day_count += 1
#        else:
#            errors.append(top_data[i:i+24][0])
#    except:
#        break
#print day_data
#print errors
#print day_data/day_count

#outliers = []
#bp_data = [[4,5,5,5,5,[9,4]]]
#for y in xrange(len(bp_data)):
#    outliers.append((elem) for elem in bp_data[y][5])
#print outliers


        
#print data
#print queryS
#data = c.query(queryS)
#print smapData
#c = smap.archiver.client.SmapClient(base='http://new.openbms.org/backend', key=['WE4iJWG7k575AluJ9RJyAZs25UO72Xu0b4RA','SA2nYWuHrJxmPNK96pdLKhnSSYQSPdALkvnA'])
#projectName = 'sdh'
#uuids = c.query('select distinct uuid where Path like \'/%s/' % projectName + '%\'')
#initialData = []
#for uuid in uuids:
#    path = c.query('select distinct Path where uuid = \'%s\'' % uuid)
#    initialData.append({'uuid':uuid, 'path':path[0]})
#initialData = sorted(initialData, key = itemgetter('path'))
#for data in initialData:
#    print data['path']

#time_zone = tz.gettz('America/Los_Angeles')
#time_zone_utc = tz.gettz('UTC')
#print time_zone

#dt_format = '%Y-%m-%d %H:%M'
#start_date = datetime.fromtimestamp(1347512640, time_zone)
#print start_date
#start_date = datetime.fromtimestamp(1339440660, time_zone)
#print time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(1339440660))
#delta = time_zone.utcoffset()
#print (delta.days * 86400 + delta.seconds) / 60 / 60

#cal = icalendar.Calendar.from_ical(open('/home/cbe/USHolidays.ics','rb').read())
#for component in cal.walk('VEVENT'):
#    print [component.decoded('DTSTART'),component.decoded('DTEND')]

#print datetime.time()

#openlayers_fp = open('/opt/powerdb2/staticStore/userData/3/3/openlayers.html', 'r')
#fp_contents = openlayers_fp.read()
#t = re.search('OpenLayers.Bounds\( (.*)\)',fp_contents)
#print t.group(1).replace(' ','').split(',')

#t2 = re.search('maxResolution: (.*),', fp_contents)
#print t2.group(1)