#!/usr/bin/env python
# coding: utf-8
import web
from config import settings
from datetime import datetime
from config.settings import session
from StringIO import StringIO
import csv,sys
import authbase

# Shared template renderer and database handle, configured once in
# config.settings and reused by every controller in this module.
render = settings.render
db = settings.db

class Index(authbase.AuthBase):
    """Paginated listing of hourly per-station averages (site_hour_data)."""

    def GET(self, page=1):
        """Render one page of hourly data.

        :param page: 1-based page number taken from the URL (string or int).
        Optional query parameters: station_name, dateStart, dateEnd,
        start_hour, end_hour — each, when present and non-empty, adds a
        filter to the WHERE clause.
        """
        page = int(page)
        perpage = settings.perpage_count
        offset = (page - 1) * perpage
        i = web.input()
        # Build the WHERE clause with $-placeholders so user input is
        # bound as query variables by web.py instead of being
        # string-concatenated into the SQL (the original code was
        # vulnerable to SQL injection through every filter parameter).
        clauses = ["1=1"]
        sql_vars = {}
        selected_station_name = i.get('station_name') or None
        if selected_station_name:
            clauses.append("site = $station_name")
            sql_vars['station_name'] = selected_station_name
        selected_start_date = i.get('dateStart') or None
        if selected_start_date:
            # CAST replaces the original `timestamp '<literal>'` syntax,
            # which cannot take a bound parameter.
            clauses.append("collection_date >= CAST($start_date AS timestamp)")
            sql_vars['start_date'] = selected_start_date
        selected_end_date = i.get('dateEnd') or None
        if selected_end_date:
            clauses.append("collection_date <= CAST($end_date AS timestamp)")
            sql_vars['end_date'] = selected_end_date
        selected_start_hour = i.get('start_hour') or None
        if selected_start_hour:
            clauses.append("collection_hour >= $start_hour")
            sql_vars['start_hour'] = selected_start_hour
        selected_end_hour = i.get('end_hour') or None
        if selected_end_hour:
            clauses.append("collection_hour <= $end_hour")
            sql_vars['end_hour'] = selected_end_hour
        where_sql = " and ".join(clauses)
        count = db.query(
            "SELECT COUNT(*) AS count FROM site_hour_data WHERE " + where_sql,
            vars=sql_vars)[0]
        sql_vars['limit'] = perpage
        sql_vars['offset'] = offset
        data_list = db.query(
            "SELECT * FROM site_hour_data WHERE " + where_sql +
            " LIMIT $limit OFFSET $offset", vars=sql_vars)
        # Ceiling division for the total page count (`/` is integer
        # division on the Python 2 ints involved here).
        pages = count.count / perpage
        if count.count % perpage > 0:
            pages += 1
        stations = list(db.select('stations'))
        return render.hour_data.index(
            data_list=data_list, pages=pages, current_page=page,
            stations=stations, count=count,
            selected_station_name=selected_station_name,
            selected_start_date=selected_start_date,
            selected_end_date=selected_end_date,
            selected_start_hour=selected_start_hour,
            selected_end_hour=selected_end_hour)

class Export(authbase.AuthBase):
    """CSV export of the hourly data matching the same filters as Index."""

    def GET(self):
        """Stream all matching site_hour_data rows as an attached CSV file.

        Optional query parameters: station_name, dateStart, dateEnd,
        start_hour, end_hour (same semantics as Index.GET).
        """
        i = web.input()
        # Parameterized WHERE clause — the original concatenated raw
        # request values into the SQL and was injectable.
        clauses = ["1=1"]
        sql_vars = {}
        station_name = i.get('station_name') or None
        if station_name:
            clauses.append("site = $station_name")
            sql_vars['station_name'] = station_name
        start_date = i.get('dateStart') or None
        if start_date:
            clauses.append("collection_date >= CAST($start_date AS timestamp)")
            sql_vars['start_date'] = start_date
        end_date = i.get('dateEnd') or None
        if end_date:
            clauses.append("collection_date <= CAST($end_date AS timestamp)")
            sql_vars['end_date'] = end_date
        start_hour = i.get('start_hour') or None
        if start_hour:
            clauses.append("collection_hour >= $start_hour")
            sql_vars['start_hour'] = start_hour
        end_hour = i.get('end_hour') or None
        if end_hour:
            clauses.append("collection_hour <= $end_hour")
            sql_vars['end_hour'] = end_hour
        data_list = db.query(
            "SELECT * FROM site_hour_data WHERE " + " and ".join(clauses) +
            " ORDER BY collection_date", vars=sql_vars)
        # Original code crashed with TypeError when station_name was
        # missing (None + unicode); fall back to an empty prefix.
        file_name = (station_name or u'') + u'每小时均值数据.csv'

        def _enc(value):
            # Encode unicode cell values explicitly for Python 2's
            # byte-oriented csv module; this replaces the previous
            # reload(sys)/sys.setdefaultencoding('utf-8') hack.
            if isinstance(value, unicode):
                return value.encode('utf-8')
            return value

        csv_file = StringIO()
        csv_writer = csv.writer(csv_file)
        csv_writer.writerow(['监测站', '日期', '时间', 'NO₂(μg/m³)', 'O₃(μg/m³)',
                             'PM10(μg/m³)', 'PM2.5(μg/m³)', 'CO(mg/m³)',
                             'SO₂(μg/m³)', '湿度(%rh)', '风向(a)', '风速(m/s)',
                             '大气压(MPa)', '温度(°C)'])
        for data in data_list:
            csv_writer.writerow([_enc(v) for v in (
                data.site, data.collection_date, data.collection_hour,
                data.avg_no2, data.avg_o3, data.avg_pm10, data.avg_pm25,
                data.avg_co, data.avg_so2, data.avg_humidity,
                data.avg_wind_direction, data.avg_wind_speed,
                data.avg_atmosphere, data.avg_temperature)])

        # Encode the (possibly non-ASCII) filename before it reaches the
        # HTTP header layer.
        web.header("Content-Disposition",
                   "attachment; filename=%s" % file_name.encode('utf-8'))
        web.header("Content-Type", "text/csv")

        return csv_file.getvalue()



class MonthGroup(authbase.AuthBase):
    """Paginated listing of per-month, per-hour-slot averages
    (v_site_hour_month_group_data view)."""

    def GET(self, page=1):
        """Render one page of monthly-grouped hourly data.

        :param page: 1-based page number taken from the URL (string or int).
        Optional query parameters: station_name, dateStart, dateEnd,
        start_hour, end_hour. Note that dateStart/dateEnd filter on the
        collection_month column here.
        """
        page = int(page)
        perpage = settings.perpage_count
        offset = (page - 1) * perpage
        i = web.input()
        # Build the WHERE clause with $-placeholders so user input is
        # bound as query variables by web.py instead of being
        # string-concatenated into the SQL (the original code was
        # vulnerable to SQL injection through every filter parameter).
        clauses = ["1=1"]
        sql_vars = {}
        selected_station_name = i.get('station_name') or None
        if selected_station_name:
            clauses.append("site = $station_name")
            sql_vars['station_name'] = selected_station_name
        selected_start_date = i.get('dateStart') or None
        if selected_start_date:
            clauses.append("collection_month >= $start_date")
            sql_vars['start_date'] = selected_start_date
        selected_end_date = i.get('dateEnd') or None
        if selected_end_date:
            clauses.append("collection_month <= $end_date")
            sql_vars['end_date'] = selected_end_date
        selected_start_hour = i.get('start_hour') or None
        if selected_start_hour:
            clauses.append("collection_hour >= $start_hour")
            sql_vars['start_hour'] = selected_start_hour
        selected_end_hour = i.get('end_hour') or None
        if selected_end_hour:
            clauses.append("collection_hour <= $end_hour")
            sql_vars['end_hour'] = selected_end_hour
        where_sql = " and ".join(clauses)
        count = db.query(
            "SELECT COUNT(*) AS count FROM v_site_hour_month_group_data WHERE "
            + where_sql, vars=sql_vars)[0]
        sql_vars['limit'] = perpage
        sql_vars['offset'] = offset
        data_list = db.query(
            "SELECT * FROM v_site_hour_month_group_data WHERE " + where_sql +
            " LIMIT $limit OFFSET $offset", vars=sql_vars)
        # Ceiling division for the total page count (`/` is integer
        # division on the Python 2 ints involved here).
        pages = count.count / perpage
        if count.count % perpage > 0:
            pages += 1
        stations = list(db.select('stations'))
        return render.hour_data.month_group(
            data_list=data_list, pages=pages, current_page=page,
            stations=stations, count=count,
            selected_station_name=selected_station_name,
            selected_start_date=selected_start_date,
            selected_end_date=selected_end_date,
            selected_start_hour=selected_start_hour,
            selected_end_hour=selected_end_hour)

class MonthGroupExport(authbase.AuthBase):
    """CSV export of the monthly-grouped data matching the same filters
    as MonthGroup."""

    def GET(self):
        """Stream all matching v_site_hour_month_group_data rows as an
        attached CSV file.

        Optional query parameters: station_name, dateStart, dateEnd,
        start_hour, end_hour (same semantics as MonthGroup.GET).
        """
        i = web.input()
        # Parameterized WHERE clause — the original concatenated raw
        # request values into the SQL and was injectable.
        clauses = ["1=1"]
        sql_vars = {}
        station_name = i.get('station_name') or None
        if station_name:
            clauses.append("site = $station_name")
            sql_vars['station_name'] = station_name
        start_date = i.get('dateStart') or None
        if start_date:
            clauses.append("collection_month >= $start_date")
            sql_vars['start_date'] = start_date
        end_date = i.get('dateEnd') or None
        if end_date:
            clauses.append("collection_month <= $end_date")
            sql_vars['end_date'] = end_date
        start_hour = i.get('start_hour') or None
        if start_hour:
            clauses.append("collection_hour >= $start_hour")
            sql_vars['start_hour'] = start_hour
        end_hour = i.get('end_hour') or None
        if end_hour:
            clauses.append("collection_hour <= $end_hour")
            sql_vars['end_hour'] = end_hour
        data_list = db.query(
            "SELECT * FROM v_site_hour_month_group_data WHERE "
            + " and ".join(clauses), vars=sql_vars)
        # Original code crashed with TypeError when station_name was
        # missing (None + unicode); fall back to an empty prefix.
        file_name = (station_name or u'') + u'每时段月均值数据.csv'

        def _enc(value):
            # Encode unicode cell values explicitly for Python 2's
            # byte-oriented csv module; this block previously depended on
            # Export.GET having called sys.setdefaultencoding first.
            if isinstance(value, unicode):
                return value.encode('utf-8')
            return value

        csv_file = StringIO()
        csv_writer = csv.writer(csv_file)
        csv_writer.writerow(['监测站', '日期', '时段', 'NO₂(μg/m³)', 'O₃(μg/m³)',
                             'PM10(μg/m³)', 'PM2.5(μg/m³)', 'CO(mg/m³)',
                             'SO₂(μg/m³)', '湿度(%rh)', '风向(a)', '风速(m/s)',
                             '大气压(MPa)', '温度(°C)'])
        for data in data_list:
            csv_writer.writerow([_enc(v) for v in (
                data.site, data.collection_month, data.collection_hour,
                data.avg_no2, data.avg_o3, data.avg_pm10, data.avg_pm25,
                data.avg_co, data.avg_so2, data.avg_humidity,
                data.avg_wind_direction, data.avg_wind_speed,
                data.avg_atmosphere, data.avg_temperature)])

        # Encode the (possibly non-ASCII) filename before it reaches the
        # HTTP header layer.
        web.header("Content-Disposition",
                   "attachment; filename=%s" % file_name.encode('utf-8'))
        web.header("Content-Type", "text/csv")

        return csv_file.getvalue()