# -*- coding: utf-8 -*-


from netCDF4 import Dataset
import numpy as np
import pandas as pd
import pandas.json as _json
import fg_solar_sat_common as fg_solar
import datasets.utils.data as fg_data
from datasets.utils.fc_managers import *

from datasets.utils.fg_dataset_mold_base import *
from metlib.datetime import TD
from datakeeper.dataset_mold import DatasetMoldBase, GetFileError
from metlib.datetime import parse_datetime


class FG_Solar_Sat_DatasetBase(FG_DatasetMoldBase):
    """Common base for the solar satellite dataset molds.

    Holds the URI layout, the Chinese display-name lookup tables and the
    time-coverage metadata shared by the PY/PM/PT/RY subset classes.
    """
    uri_fields = ['dataset', 'subset', 'varname', 'time', 'lon_lat']
    uri_parser = fg_solar.parse_solar_sat_uri
    allow_RY_download_json = True
    zh_subset_d = fg_solar.solar_sat_zh_subset_d
    zh_varname_d = fg_solar.solar_sat_varname_d

    def __init__(self, name, uri, info, *args, **kwargs):
        super(FG_Solar_Sat_DatasetBase, self).__init__(name, uri, info, *args, **kwargs)
        # Temporal coverage of the dataset (begdt/enddt may be None if
        # absent from `info`; parse_datetime handles that upstream).
        self.begdt = parse_datetime(info.get('begdt'))
        self.enddt = parse_datetime(info.get('enddt'))
        self.years = info.get('years')
        # Prefer an explicit Chinese name from `info`; otherwise fall back
        # to the mold-level table, and finally to the raw dataset name.
        fallback_zh = fg_solar.solar_sat_zh_names.get(self.name, self.name)
        self.zh_name = info.get('zh_name', fallback_zh)
        self.root_path = kwargs.get('root_path', '')


class FG_Solar_Sat_PM_Dataset(FG_Solar_Sat_DatasetBase):
    """Point monthly statistics (PM) subset of the solar satellite dataset."""

    def __init__(self, name, uri, info, *args, **kwargs):
        super(FG_Solar_Sat_PM_Dataset, self).__init__(name, uri, info, *args, **kwargs)
        self.varnames = fg_solar.solar_sat_pm_varnames
        self.uri_field_values = {
            'varname': self.varnames
        }

    @property
    def schema(self):
        """Describe the PM subset: one monthly-stat variable node per varname."""
        res = {
            'arch': 'subset',
            'name': 'PM',
            'type': 'SV',
            'subs': []
        }
        for vn in fg_solar.solar_sat_pm_varnames:
            res['subs'].append({
                'arch': 'variable',
                'name': vn,
                'type': 'V',
                'datatype': 'monthlystat',
                'vartype': 'T',
                'zipped': False,
                'formats': ['json', 'hc_figure']
            })
        return res

    @cache_data
    @pack_data
    @perm_data
    def get_data(self, uri, request=None, *args, **kwargs):
        """Read the monthly sum/mean/std/count series for one grid point.

        Fetches ``<dataset>/<subset>/<time>/rad.nc`` through the file-cache
        manager and slices the point (jy, ix) taken from the parsed URI.

        Returns a 'dataunit' dict; any other requested ``datatype`` yields
        an empty dict (previously ``res`` was left unbound and the method
        raised NameError at return time).
        """
        datatype = kwargs.get('datatype', 'dataunit')
        uri_info = kwargs.get('uri_info', None)
        sample = kwargs.get('sample', False)

        if uri_info is None:
            uri_info = fg_solar.parse_solar_sat_uri(uri)

        if datatype != 'dataunit':
            return {}

        # NOTE(review): sampling currently falls through to the full
        # subset; a dedicated '<subset>_sample' path was planned.
        subset_uri = uri_info['subset']
        dataset = uri_info['dataset']
        varname = uri_info['varname']
        time = uri_info['time']

        s3_uri = '%s/%s/%s/rad.nc' % (dataset, subset_uri, time)
        fc = raw_medium_fc_manager.get(s3_uri, when_not_exist=['fetch'])
        pm = Dataset(fc.filepath)
        try:
            jy, ix = uri_info['jy'], uri_info['ix']
            # Scale raw stored values (/100/3.6 — presumably 0.01 MJ/m2
            # to kWh/m2; TODO confirm against the producer).
            sum_values = pm.variables['sum'][:, jy, ix] / 100 / 3.6
            mean_values = pm.variables['mean'][:, jy, ix] / 100 / 3.6
            std_values = pm.variables['std'][:, jy, ix] / 100 / 3.6
            counts = pm.variables['count'][:]
        finally:
            # Always release the NetCDF handle, even if a read fails.
            pm.close()

        return {
            'type': 'dataunit',
            'uri': uri,
            'info': {
                'varname': varname,
                'zh_varname': fg_solar.solar_sat_varname_d.get(varname, varname),
                'type': 'monthlymean',
                'sample': sample,
            },
            'data': {
                'values': sum_values.tolist(),
                'count': counts.tolist(),
                'std': std_values.tolist(),
                'mean': mean_values.tolist(),
                'stat_method': 'sum',
                'units': fg_solar.solar_sat_units_d.get(varname)
            }
        }

    @support_packunitjson
    def get_file(self, uri, dest, request=None, *args, **kwargs):
        # File export is produced entirely by the decorator.
        pass


class FG_Solar_Sat_PY_Dataset(FG_Solar_Sat_DatasetBase):
    """Point yearly statistics (PY) subset of the solar satellite dataset."""

    def __init__(self, name, uri, info, *args, **kwargs):
        super(FG_Solar_Sat_PY_Dataset, self).__init__(name, uri, info, *args, **kwargs)

    @property
    def schema(self):
        """Describe the PY subset; 'summary' is JSON-only, others also chart."""
        res = {
            'arch': 'subset',
            'name': 'PY',
            'type': 'SV',
            'subs': []
        }
        for vn in fg_solar.solar_sat_py_varnames:
            if vn == 'summary':
                formats = ['json']
            else:
                formats = ['json', 'hc_figure']
            res['subs'].append({
                'arch': 'variable',
                'name': vn,
                'type': 'V',
                'datatype': 'yearlystat',
                'vartype': 'T',
                'zipped': False,
                'formats': formats
            })
        return res

    @cache_data
    @pack_data
    @perm_data
    def get_data(self, uri, request=None, *args, **kwargs):
        """Read the yearly sum/mean/std/count series for one grid point.

        Every sub-varname in ``solar_sat_py_sub_varnames`` receives the
        same value series (NOTE(review): appears intentional as a summary
        payload — confirm).  Returns a 'dataunit' dict; any other requested
        ``datatype`` yields an empty dict (previously ``res`` was left
        unbound and the method raised NameError at return time).
        """
        datatype = kwargs.get('datatype', 'dataunit')
        uri_info = kwargs.get('uri_info', None)
        sample = kwargs.get('sample', False)

        if uri_info is None:
            uri_info = fg_solar.parse_solar_sat_uri(uri)

        if datatype != 'dataunit':
            return {}

        # NOTE(review): sampling currently falls through to the full subset.
        subset_uri = uri_info['subset']
        dataset = uri_info['dataset']
        varname = uri_info['varname']
        time = uri_info['time']

        s3_uri = '%s/%s/%s/rad.nc' % (dataset, subset_uri, time)
        fc = raw_medium_fc_manager.get(s3_uri, when_not_exist=['fetch'])
        py = Dataset(fc.filepath)
        try:
            jy, ix = uri_info['jy'], uri_info['ix']
            # Scale raw stored values (/100/3.6 — presumably 0.01 MJ/m2
            # to kWh/m2; TODO confirm against the producer).
            mean_values = py.variables['mean'][:, jy, ix] / 100 / 3.6
            std_values = py.variables['std'][:, jy, ix] / 100 / 3.6
            sum_values = py.variables['sum'][:, jy, ix] / 100 / 3.6
            counts = py.variables['count'][:]
        finally:
            # Always release the NetCDF handle, even if a read fails.
            py.close()

        final_unit = {
            'type': 'dataunit',
            'uri': uri,
            'info': {
                'varname': varname,
                'zh_varname': fg_solar.solar_sat_varname_d.get(varname, varname),
                'type': 'solar_sat_summary',
                'sample': sample,
                'sub_varnames': fg_solar.solar_sat_py_sub_varnames
            }
        }

        final_data = {}
        for sub_varname in fg_solar.solar_sat_py_sub_varnames:
            final_data[sub_varname] = {
                'name': sub_varname,
                'zh_name': fg_solar.solar_sat_varname_d.get(sub_varname, sub_varname),
                'values': sum_values.tolist(),
                'count': counts.tolist(),
                'std': std_values.tolist(),
                'mean': mean_values.tolist(),
                'stat_method': 'sum',
                'units': fg_solar.solar_sat_units_d.get(sub_varname)
            }
        final_unit['data'] = final_data
        return final_unit

    @support_packunitjson
    def get_file(self, uri, dest, request=None, *args, **kwargs):
        # File export is produced entirely by the decorator.
        pass


class FG_Solar_Sat_PT_Dataset(FG_Solar_Sat_DatasetBase):
    """Point timeseries (PT) subset of the solar satellite dataset."""

    def __init__(self, name, uri, info, *args, **kwargs):
        super(FG_Solar_Sat_PT_Dataset, self).__init__(name, uri, info, *args, **kwargs)

    @property
    def schema(self):
        """Describe the PT subset with per-variable time and jy_ix coords."""
        res = {
            'arch': 'subset',
            'name': 'PT',
            'type': 'SV',
            'subs': [],

        }
        for vn in fg_solar.solar_sat_pt_varnames:
            res['subs'].append({
                'arch': 'variable',
                'name': vn,
                'type': 'V',
                'datatype': 'timeseries',
                'vartype': 'T',
                'zipped': False,
                'formats': ['json', 'csv', 'hc_figure'],
                'coords': [{
                    'default': sorted(self.years)[-1:],
                    'name': 'time',
                    'values': self.years,
                    'can_poly': False,
                }, {
                    "default": "<%= jy_ix_default %>",
                    "name": "jy_ix",
                    "values": "<%= jy_ix_values %>",
                }],
            })
        return res

    @cache_data
    @perm_data
    def get_data(self, uri, request=None, *args, **kwargs):
        """Read the radiation timeseries for one grid point.

        Applies the requested UTC offset (defaulting to +8) to the parsed
        URI's begdt/enddt, then reads 'date'/'rad' from the cached NetCDF
        file and packs the series as a datapack, dataunit dict or DataFrame
        depending on ``datatype``.

        Raises fg_data.BadDataParameter for an unparsable utc_offset.
        """
        datatype = kwargs.get('datatype', 'datapack')
        sample = kwargs.get('sample', False)
        uri_info = kwargs.get('uri_info', None)
        if uri_info is None:
            uri_info = fg_solar.parse_solar_sat_uri(uri)

        try:
            if uri_info['timezone'] != 0:
                utc_offset = uri_info['timezone']
            else:
                utc_offset = int(kwargs.get('utc_offset', 8))
            toffset = TD('%sh' % utc_offset)
        except ValueError as e:
            raise fg_data.BadDataParameter(unicode(e))

        if utc_offset != 0:
            # Shift the request window back to UTC.  NOTE(review): this
            # mutates uri_info in place, which the caller may observe.
            uri_info['begdt'] -= toffset
            uri_info['enddt'] -= toffset

        # NOTE(review): sampling currently falls through to the full
        # subset; a dedicated '<subset>_sample' path was planned.
        subset_uri = uri_info['subset']
        dataset = uri_info['dataset']
        varname = uri_info['varname']
        time = uri_info['time']

        s3_uri = '%s/%s/%s/rad.nc' % (dataset, subset_uri, time)
        fc = raw_medium_fc_manager.get(s3_uri, when_not_exist=['fetch'])
        pt = Dataset(fc.filepath)
        try:
            date_strings = pt.variables['date'][:]
            # Scale raw stored values (/100/3.6 — presumably 0.01 MJ/m2
            # to kWh/m2; TODO confirm against the producer).
            rad_values = pt.variables['rad'][:, uri_info['jy'], uri_info['ix']] / 100 / 3.6
        finally:
            # Always release the NetCDF handle, even if a read fails.
            pt.close()

        final_df = pd.DataFrame({varname: rad_values}, index=date_strings)
        final_df.index.name = 'datetime'

        final_pack = {
            'type': 'datapack',
            'uri': uri,
            'tags': {
                uri: uri
            },
            'contents': {
                uri: {
                    'type': 'dataunit',
                    'uri': uri,
                    'info': {
                        'varname': varname,
                        'zh_varname': fg_solar.solar_sat_varname_d.get(varname, varname),
                        'type': 'timeseries',
                        'sample': sample,
                    }
                }
            }
        }

        # List comprehension instead of map(): keeps dts indexable on
        # Python 3, where map() returns a lazy iterator.
        dts = [parse_datetime(ds) for ds in date_strings]
        begdt = dts[0]
        # Guard against a single-timestep file (dts[1] would IndexError).
        interval = (dts[1] - dts[0]).total_seconds() if len(dts) > 1 else 0
        final_pack['contents'][uri]['data'] = {
            'dts': dts,
            'begdt': begdt,
            'interval': interval,
            'values': rad_values.tolist(),
            'units': fg_solar.solar_sat_units_d.get(varname),
            'suggest_range': fg_solar.solar_sat_suggest_range_d.get(varname)
        }

        if datatype == 'dataunit':
            res = final_pack.get('contents')
        elif datatype == 'datapack':
            res = final_pack
        elif datatype == 'dataframe':
            res = final_df
        else:
            res = {}
        return res

    @support_csv
    @support_packunitjson
    def get_file(self, uri, dest, request=None, *args, **kwargs):
        # File export (CSV/JSON) is produced entirely by the decorators.
        pass


class FG_Solar_Sat_RY_Dataset(FG_Solar_Sat_DatasetBase):
    """Rectangular yearly statistics (RY) subset: gridded yearly sums
    over a lon/lat rectangle, served from the PY NetCDF file."""

    def __init__(self, name, uri, info, *args, **kwargs):
        super(FG_Solar_Sat_RY_Dataset, self).__init__(name, uri, info, *args, **kwargs)
        self.varnames = fg_solar.solar_sat_ry_varnames
        # Consistent with FG_Solar_Sat_PM_Dataset's attribute name; the old
        # misspelled `uri_fields_values` is kept as an alias for backward
        # compatibility (NOTE(review): confirm which name the framework reads).
        self.uri_field_values = {
            'varname': self.varnames
        }
        self.uri_fields_values = self.uri_field_values

    @property
    def schema(self):
        """Describe the RY subset with time and jy_ix rectangle coords."""
        res = {
            'arch': 'subset',
            'name': 'RY',
            'type': 'SV',
            'subs': []
        }
        for vn in self.varnames:
            res['subs'].append({
                'arch': 'variable',
                'name': vn,
                'vartype': 'T',
                'datatype': 'yearlystat',
                'formats': ['json', 'rect_figure', 'kmz'],
                'zipped': False,
                'coords': [{
                    'name': 'time',
                    'values': self.years,
                    'default': [self.years[-1]]
                }, {
                    'name': 'jy_ix',
                    'values': '<%= jy_ix_values %>',
                    'default': '<%= jy_ix_values %>',
                }]
            })
        return res

    @cache_data(valid_params={'dataunit': ['datatype', 'user']})
    @provide_figinfo(suggest_cmap_dict=fg_solar.solar_sat_cmap_d)
    @perm_data
    def get_data(self, uri, request=None, *args, **kwargs):
        """Read the yearly 'sum' grid over the rectangle (jy1:jy2, ix1:ix2).

        Always reads from the 'PY' subset file; returns a dataunit dict
        with 1-D lon/lat axes and the 2-D value grid (kept as numpy arrays
        for the figure/packing decorators).
        """
        datatype = kwargs.get('datatype', 'dataunit')
        uri_info = kwargs.pop('uri_info', None)
        sample = kwargs.get('sample', False)
        if uri_info is None:
            uri_info = fg_solar.parse_solar_sat_uri(uri)

        varname = uri_info['varname']

        # RY is derived from the yearly-stat (PY) file.
        subset_uri = 'PY'
        if sample:
            # NOTE(review): sampling currently falls through to the full subset.
            pass

        s3uri = '%s/%s/%s/rad.nc' % (
            uri_info['dataset'],
            subset_uri,
            uri_info['time'],
        )
        fc = raw_medium_fc_manager.get(s3uri, when_not_exist=['fetch'])
        jy1 = uri_info['jy1']
        jy2 = uri_info['jy2']
        ix1 = uri_info['ix1']
        ix2 = uri_info['ix2']

        py = Dataset(fc.filepath)
        try:
            # Scale raw stored values (/100/3.6 — presumably 0.01 MJ/m2
            # to kWh/m2; TODO confirm against the producer).
            data = py.variables['sum'][0, jy1:jy2, ix1:ix2] / 100 / 3.6
            lon = py.variables['lon'][jy1:jy2, ix1:ix2]
            lat = py.variables['lat'][jy1:jy2, ix1:ix2]
        finally:
            # Always release the NetCDF handle, even if a read fails.
            py.close()

        return {
            # Fix: the key used to be the *variable* `uri` (yielding
            # {<uri-string>: <uri-string>}) instead of the literal 'uri'
            # key every sibling dataunit uses.
            'uri': uri,
            'type': 'dataunit',
            'info': {
                'varname': varname,
                'type': 'rect_yearlystat',
                'sample': sample
            },
            'coords': {
                'dataset': uri_info['dataset'],
                'subset': uri_info['subset'],
                'varname': uri_info['varname'],
                'time': uri_info['time'],
                'jy_ix': uri_info['jy_ix']
            },
            'data': {
                # The grid is regular, so a single row/column gives the axes.
                'lon': lon[0, :],
                'lat': lat[:, 0],
                'values': data,
                'stat_method': 'sum',
                'suggest_range': fg_solar.solar_sat_suggest_range_d.get(varname, (None, None)),
                'units': fg_solar.solar_sat_units_d.get(varname, '')
            }
        }

    def get_file(self, uri, dest, request=None, *args, **kwargs):
        # Delegates to the mold-provided RY file exporter.
        return self.RY_get_file(uri, dest, request=request, *args, **kwargs)


class FG_Solar_Sat_Dataset(FG_Solar_Sat_DatasetBase):
    """Top-level solar satellite dataset mold; dispatches to the
    PY/PM/PT/RY subset classes."""

    mold_name = 'Solar_Sat'
    subset_classes = [
        {'name': 'PointYearlyStat', 'uri': 'PY', 'class': FG_Solar_Sat_PY_Dataset},
        {'name': 'PointMonthlyStat', 'uri': 'PM', 'class': FG_Solar_Sat_PM_Dataset},
        {'name': 'PointTimeseries', 'uri': 'PT', 'class': FG_Solar_Sat_PT_Dataset},
        {'name': 'RectYearlyStat', 'uri': 'RY', 'class': FG_Solar_Sat_RY_Dataset}
    ]
    tags = ('solar', 'solar_sat', 'meteorology')

    def __init__(self, name, uri, info, *args, **kwargs):
        super(FG_Solar_Sat_Dataset, self).__init__(name, uri, info, *args, **kwargs)
        self.desc = info.get('desc', info.get('zh_name', name))

    @property
    def schema(self):
        """Build the dataset-level schema and append each subset's schema."""
        res = {
            'arch': 'dataset',
            'mold': self.mold_name,
            'type': 'DSV',
            'name': self.name,
            'zh_name': self.zh_name,
            "desc": self.desc,
            'common': {
                'coords': [{
                    'default': sorted(self.years)[-1:],
                    'name': 'time',
                    'values': self.years,
                    'can_poly': True,
                }, {
                    "default": "<%= jy_ix_default %>",
                    "name": "jy_ix",
                    "values": "<%= jy_ix_values %>",
                }],
            },
            'subset_dict': fg_solar.solar_sat_zh_subset_d,
            'varname_dict': fg_solar.solar_sat_varname_d,
            'subs': [],
        }
        for class_info in self.subset_classes:
            res['subs'].append(self.uri_subsets[class_info['uri']].schema)
        return res

    def lookup(self, info, request=None, *args, **kwargs):
        """Map a point/rect lookup request to the matching subsets.

        Point lookups resolve to the PY/PM/PT subsets with a single
        'jy_ix' grid index; rect lookups resolve to RY with a rectangle
        index.  Unknown types return an empty result list.

        Raises LookupError wrapping any underlying failure.
        """
        results = []
        try:
            if info['type'] == 'point':
                i, j = fg_solar.lonlat_to_ij(float(info.get('lon')), float(info.get('lat')), self.info)
                jy_ix = '%s_%s' % (j, i)
                results.append({
                    'dataset': self.name,
                    'subsets': ('PY', 'PM', 'PT'),
                    'jy_ix_values': [jy_ix],
                    'jy_ix_default': [jy_ix],
                })
            elif info['type'] == 'rect':
                lon1 = float(info.get('lon1'))
                lat1 = float(info.get('lat1'))
                lon2 = float(info.get('lon2'))
                lat2 = float(info.get('lat2'))
                rect_ij = fg_solar.get_sat_rect(lon1, lat1, lon2, lat2, self.info)
                results.append({
                    'dataset': self.name,
                    'subsets': ['RY'],
                    'jy_ix_values': [rect_ij.get('jy_ix')],
                    'jy_ix_default': [rect_ij.get('jy_ix')],
                })
            else:
                pass
        except Exception as e:
            # Fix: the `%` used to be applied *outside* the raise
            # (``raise LookupError(u'[%s] %s') % (...)``), which raised a
            # TypeError instead of the intended formatted LookupError.
            raise LookupError(u'[%s] %s' % (type(e), unicode(e)))
        return results

    @protect_hack
    def get_file(self, uri, dest, request=None, *args, **kwargs):
        """Dispatch get_file to the subset named in the URI.

        Raises GetFileError wrapping any underlying failure.
        """
        try:
            uri_info = fg_solar.parse_solar_sat_uri(uri)
            subset = self.uri_subsets.get(uri_info['subset'], None)
            if subset is None:
                # Fix: message previously said "Merra" (copy/paste from
                # another dataset mold).
                raise ValueError("No such subset in %s: %s" % (self.name, uri_info['subset']))

            return subset.get_file(uri, dest, uri_info=uri_info, request=request, *args, **kwargs)
        except Exception as e:
            raise GetFileError(u'[%s] %s' % (type(e), unicode(e)))
