# -*- coding: utf-8 -*-
"""
Created on 2022/2/24

@author: Song
"""

import plotnine as G
import altair as alt
import streamlit as st
from vika import Vika

from DBTestAnalysisLib import *

# Vika (SaaS spreadsheet) client used to read build/deploy status sheets.
# NOTE(review): API token is hard-coded in source — move it to an env var
# or a secrets store and rotate the exposed credential.
vika = Vika("uskaYYRdtqFoPqcVw3KM2Kf")

# Streamlit page setup; set_page_config must be the first st.* call.
st.set_page_config(page_title="空间占用&数据库状态", page_icon="📈", layout="wide")
# Sidebar selector for what the summary table should display
# (last test date / last test duration / total test count).
# NOTE(review): only referenced by commented-out code further down.
content_to_show = st.sidebar.radio(
    "表格显示内容?",
    ("最近一次测试日期", "最近一次测试耗时", "测试总数")
)

def hashDefaultGetFunc(item, key):
    """Default accessor for HashResult: plain subscript lookup on a mapping."""
    value = item[key]
    return value


def lcGetFunc(item, key):
    """Accessor for LeanCloud objects, which expose field values via .get()."""
    value = item.get(key)
    return value


def g(row, key):
    """Safe lookup: return row[key] when the key exists, else None."""
    if key in row:
        return row[key]
    return None


class HashResult:
    """Group a list of records by a composite key for O(1) lookups.

    The composite key is the '_'-joined sequence of values extracted
    from each record for the given keys; get(*parts) returns every
    record whose parts join to the same string (empty list when absent).
    """

    def __init__(self, data, keys, getFunc=hashDefaultGetFunc):
        self._data = data
        self._keys = keys
        self._cache = {}
        for record in data:
            parts = [getFunc(record, k) for k in keys]
            composite = '_'.join(parts)
            # setdefault creates the bucket on first sight of the key
            self._cache.setdefault(composite, []).append(record)

    def get(self, *args):
        """Return the list of records grouped under '_'.join(args)."""
        return self._cache.get('_'.join(args), [])


def milestones():
    """Build the meta table: one row per (db, dataset, tp_size) combination.

    Covers 3 datasets x 8 systems x 2 tp sizes, plus an extra '.0'
    (no-temporal baseline) row for the two Neo4j variants (N1/N2).
    """
    datasets = ['energy', 'traffic', 'syn']  #
    systems = ['TGC', 'TGK', 'TGKI', 'TGS', 'N1', 'N2', 'PG', 'MA']
    rows = []
    for dataset in datasets:
        for db in systems:
            # '.01', '.5', '.9' scale factors are currently disabled
            rows.extend({'db': db, 'dataset': dataset, 'tp_size': s}
                        for s in ['.1', '.all'])
            if db in ('N1', 'N2'):
                rows.append({'db': db, 'dataset': dataset, 'tp_size': '.0'})
    return pd.DataFrame(rows)

def lcMilestoneBuild(meta):
    """Annotate meta with the latest LeanCloud milestone-build stats.

    Adds, per row: last build timestamp, its duration, the number of
    historical builds, and the median historical build duration
    (all None when no milestone matches the row).
    """
    query = leancloud.Query('TestMilestone')
    query.not_equal_to('extra', 'deploy')
    query.exists('duration')
    query.add_descending('createdAt')
    query.limit(999)
    results = query.find()
    # Index milestones by (DB, Dataset, MSize) for constant-time lookup;
    # groups keep the query's createdAt-descending order.
    cache = HashResult(results, ['DB', 'Dataset', 'MSize'], lcGetFunc)

    def mBuildInfo(row):
        builds = cache.get(row['db'].upper(), row['dataset'], 't' + row['tp_size'])
        if not builds:
            return [None, None, None, None]
        durations = [b.get('duration') for b in builds]
        latest = builds[0]  # newest first (descending query)
        return [latest.updated_at.strftime("%m-%d %H:%M"),
                latest.get('duration'),
                len(builds),
                np.median(durations)]

    meta[['lc_m_ct',
          'lc_m_dur_t',
          'lc_m_his_cnt',
          'lc_m_his_dur_median']] = meta.apply(mBuildInfo, axis=1, result_type="expand")
    return meta


def vkMilestoneBuild(meta):
    """Annotate meta with the two most recent Vika build records per row.

    Adds: latest build creation time & status, plus the previous build's
    creation time & status (None where unavailable).
    """
    sheet = vika.datasheet("dstSkZb56uR8XsulbF", field_key="name")
    records = [r.json() for r in sheet.records.all()]
    cache = HashResult(records, ['dataset', 'db', 'tpSize'])

    def fmt(rec):
        # '创建时间' ("creation time") holds epoch milliseconds.
        ct = datetime.fromtimestamp(int(rec['创建时间'] / 1000)).strftime("%m-%d %H:%M")
        status = rec['status'] if 'status' in rec else rec['phase']
        return ct, status

    def extractor(rowDict):
        builds = cache.get(rowDict['dataset'], rowDict['db'].upper(), 'T' + rowDict['tp_size'])
        if not builds:
            return [None, None, None, None]
        ct, status = fmt(builds[0])
        pct, pstatus = fmt(builds[1]) if len(builds) > 1 else (None, None)
        return [ct, status, pct, pstatus]

    meta[['vk.m_ct', 'vk.m.status', 'vk.m.pre.ct', 'vk.m.pre.status']] = meta.apply(extractor, axis=1,
                                                                                    result_type="expand")
    return meta


def vkMilestoneDeploy(meta):
    """Annotate meta with the latest Vika deploy record per row.

    Adds: latest deploy creation time and status (None when no deploy
    record matches the row).
    """
    sheet = vika.datasheet("dst5kWxaj8ciX8kJei", field_key="name")
    records = [r.json() for r in sheet.records.all()]
    cache = HashResult(records, ['dataset', 'db', 'tpSize'])

    def extractor(rowDict):
        # Deploy-sheet keys use lower-case db names and the tp size
        # without its leading dot (e.g. '.all' -> 'all').
        deploys = cache.get(rowDict['dataset'], rowDict['db'].lower(), rowDict['tp_size'][1:])
        if not deploys:
            return [None, None]
        latest = deploys[0]
        # '创建时间' ("creation time") holds epoch milliseconds.
        ct = datetime.fromtimestamp(int(latest['创建时间'] / 1000)).strftime("%m-%d %H:%M")
        status = latest['status'] if 'status' in latest else latest['phase']
        return [ct, status]

    meta[['vk_deploy_ct', 'vk_deploy_status']] = meta.apply(extractor, axis=1, result_type="expand")
    return meta

def lt_red(val):
    """Pandas style helper: render values below 0.2 in red, else black."""
    color = 'red' if val < 0.2 else 'black'
    return 'color:%s' % color


def calcStatus(row):
    """Derive (days since last build, last build duration) for a meta row.

    Returns (None, None) when no LeanCloud build timestamp is present.
    NOTE(review): the year is hard-coded to 2022 because 'lc_m_ct' only
    carries month/day — this goes stale past 2022; confirm and fix.
    """
    ct = row['lc_m_ct'] if 'lc_m_ct' in row else None
    if ct is None:
        return None, None
    built_at = datetime.strptime('2022-' + ct + ':00', "%Y-%m-%d %H:%M:%S")
    days_ago = (datetime.now() - built_at).total_seconds() / 3600 / 24
    return int(days_ago), row['lc_m_dur_t']


def colorCell(val, **kwargs):
    """Debug style helper: log each styled cell, always return black text."""
    print(val, kwargs)
    color = 'black'
    return 'color:%s' % color



# Work-estimate accumulators for the metrics row at the bottom.
# NOTE(review): nothing in the visible code ever updates these (the loop
# that filled them appears to be removed/commented out), so the metrics
# below always show their initial values — confirm this is intentional.
metaList = []
knownCnt = 1  # count of known items (starts at 1, which also avoids div-by-zero below)
missingCnt = 0  # count of missing items: est. missing time = missingCnt * known total time / known cnt
completeTime = 0  # total time of items that need no rebuild
rebuildTime = 0  # total time of items that need a rebuild

# Assemble the meta table, enrich it from LeanCloud + Vika, then render
# two pivot tables (days-since-build, build duration).
pp = st.sidebar.progress(0)
with st.spinner('Wait for it...'):
    meta = milestones()  # milestones()
    pp.progress(5)
    meta = lcMilestoneBuild(meta)
    pp.progress(10)
    meta = vkMilestoneBuild(meta)
    pp.progress(15)
    meta = vkMilestoneDeploy(meta)
    pp.progress(25)
    # Derived columns: days since last build and last build duration.
    meta[['f.mct_to_now', 'f.mbt']] = meta.apply(calcStatus, axis=1, result_type="expand")
    st.markdown('## 最近一次构建时间')
    # Pivot: rows = (dataset, tp_size), columns = db, values = days ago.
    # NOTE(review): highlight_null(null_color=...) was deprecated in
    # pandas 1.5 in favor of color=; pin pandas or update on upgrade.
    toPrint1 = meta.pivot(index=['dataset', 'tp_size'], columns='db', values='f.mct_to_now')
    toPrint1 = toPrint1.style.format("{:.0f}天前", na_rep="-").background_gradient(cmap='Blues', vmin=0, vmax=60).highlight_null(null_color='red')
    st.dataframe(toPrint1, use_container_width=True, height=350)
    st.markdown('## 构建消耗时间')
    # Pivot: build duration in seconds, rendered as in-cell bars.
    toPrint2 = meta.pivot(index=['dataset', 'tp_size'], columns='db', values='f.mbt')
    toPrint2 = toPrint2.style.format("{:.0f}s", na_rep="-").bar(color='#FFA500')  # .highlight_null(null_color='red')
    # st.write(toPrint2)
    # st.write(toPrint2.to_html(escape=False), unsafe_allow_html=True)
    st.dataframe(toPrint2, use_container_width=True, height=350)

    # toPrint1.style.format({'点击率': "{:.2%}", "留资率": "{:.2%}"})
    # toPrint1 = toPrint1.style.applymap(colorCell)
    # cmap = sns.diverging_palette(10, 250, sep=50, as_cmap=True)
    # .format("{:.2%}", subset=['人口增幅', '世界占比'])
    #     .background_gradient(subset=['点击率', '留资率'], cmap=cmap)\
    #     .apply(lambda x: ['background-color:yellow' if v >= toPrint1.nlargest(8, '线索成本')['线索成本'].min() else '' for v in x],
    #     subset=pd.IndexSlice[:, '线索成本'])


#
# # 'MILESTONE[Space]: Available builds (not deployed) on device(data)'
# if content_to_show == '最近一次测试日期':
#     toPrint1 = toPrint1.style.applymap(colorCell).highlight_null(null_color='red')  # , axis=0
# elif content_to_show == '最近一次测试耗时':
#     toPrint1 = toPrint1.style.highlight_null(null_color='red')  # .
# else:
#     toPrint1 = toPrint1.style.highlight_null(null_color='red')
# st.dataframe(toPrint1)

# print(missingCnt, knownCnt, completeTime, rebuildTime)
# Metrics row: completed / rebuild-needed / missing / total workload (est.).
cols = st.columns(4)
cols[0].metric(label="已完成（浅绿）", value=timeHumanRead(completeTime))  # , delta="{}项".format(knownCnt)
missingTime = missingCnt * (completeTime+rebuildTime) / knownCnt
cols[1].metric(label="重构项（灰色）需时", value=timeHumanRead(rebuildTime))
cols[2].metric(label="缺失项（红色）需时（估计）", value=timeHumanRead(missingTime), delta="{}项".format(missingCnt), delta_color="inverse")
cols[3].metric(label="全部工作量（估计）", value=timeHumanRead(completeTime+rebuildTime+missingTime), delta="共{}项".format(missingCnt+knownCnt))



def spaceDetailData(meta):  # total space and per-component space breakdown
    """Annotate meta with storage-usage columns from LeanCloud milestones.

    For each (db, dataset, tp_size) row, parses the newest matching
    milestone's 'detail' JSON and splits storage into static data/index
    ('s*') and temporal data/index ('t*'), plus a total. Rows with no
    matching milestone get Nones and a 0 total.
    """
    query = leancloud.Query('TestMilestone')
    query.not_equal_to('extra', 'deploy')
    query.add_descending('createdAt')
    query.limit(999)
    objList = query.find()
    # Index milestones by (DB, Dataset, MSize); groups keep the query's
    # createdAt-descending order, so [0] is the newest.
    cache = HashResult(objList, ['DB', 'Dataset', 'MSize'], lcGetFunc)

    # st.write(cache._cache)

    def getDetail(dataset, db, mSize):
        # Raw 'detail' JSON string of the newest matching milestone;
        # raises DataNotReadyErr when nothing matches.
        buildInfo = cache.get(db.upper(), dataset, 't' + mSize)
        if len(buildInfo) == 0:
            raise DataNotReadyErr('no db matched of {} {} {}'.format(dataset, db, mSize))
        return buildInfo[0].get('detail')

    def calcSpace(dataset, db, mSize):
        # Split storage into static (s*) and temporal (t*) data/index.
        # The 'detail' schema differs per system family (inferred from the
        # access patterns below — TODO confirm against the detail writer):
        #   MA/PG : list of tables with relname/tablesize/indexsize;
        #           '_tp'-suffixed tables are the temporal part.
        #   TG*   : dict with neo_* (static) and tp_* (temporal) keys.
        #   N1/N2 : dict totals; the '.0' (no-temporal) build is subtracted
        #           to isolate the temporal share.
        raw = getDetail(dataset, db, mSize)
        # print(raw)
        content = json.loads(raw)
        # print(content),'time': t
        result = {'sData': 0, 'sIndex': 0, 'tData': 0, 'tIndex': 0}
        if db in ['MA', 'PG']:
            for table in content:
                if table['relname'].endswith('_tp'):
                    result['tData'] += table['tablesize']
                    result['tIndex'] += table['indexsize']
                else:
                    result['sData'] += table['tablesize']
                    result['sIndex'] += table['indexsize']
        elif db.startswith('TG'):
            result['sData'] = content['neo_data']
            result['sIndex'] = content['neo_index']
            result['tData'] = content['tp_node_data'] + content['tp_rel_data']
            result['tIndex'] = content['tp_node_index'] + content['tp_rel_index']
        elif db in ['N1', 'N2']:
            raw0 = getDetail(dataset, db, '.0')
            c0 = json.loads(raw0)
            # print(c0)
            result['sData'] = c0['data']
            result['sIndex'] = c0['index']
            result['tData'] = content['data'] - c0['data']
            result['tIndex'] = content['index'] - c0['index']
        else:
            raise DataNotReadyErr('no db matched of {} {} {}'.format(dataset, db, mSize))
        return result

    def mBuildSpaceInfo(row):
        # Per-row extractor for meta.apply: the four components plus total.
        dataset = row['dataset']
        db = row['db']
        tp_size = row['tp_size']
        try:
            spaceUsage = calcSpace(dataset, db, tp_size)
            return [spaceUsage['sData'], spaceUsage['sIndex'], spaceUsage['tData'], spaceUsage['tIndex'],
                    spaceUsage['sData'] + spaceUsage['sIndex'] + spaceUsage['tData'] + spaceUsage['tIndex']]
        except DataNotReadyErr as e:
            # Best-effort: log and leave the row mostly empty.
            # NOTE(review): relies on DataNotReadyErr exposing .message —
            # confirm in DBTestAnalysisLib.
            print('WARNING: ' + e.message)
            return [None, None, None, None, 0]

    meta[['lc_m.s.data',
          'lc_m.s.index',
          'lc_m.t.data',
          'lc_m.t.index',
          'lc_m.total_size']] = meta.apply(mBuildSpaceInfo, axis=1, result_type="expand")
    return meta


# Storage-usage section: total-size table plus a faceted stacked bar chart.
meta = spaceDetailData(meta)
meta = meta[meta['tp_size'] == '.all']  # only full-size builds are compared
# st.dataframe(meta)
toPrint3 = meta.pivot(index=['dataset'], columns='db', values='lc_m.total_size')
func = lambda s: "{:.1f}GB".format(s/1024/1024/1024)
toPrint3 = toPrint3.style.format(func)
st.dataframe(toPrint3, use_container_width=True)
# NOTE(review): toPlot is a slice of meta; the assignments below trigger
# pandas SettingWithCopyWarning — consider meta[['db', 'dataset']].copy().
toPlot = meta[['db', 'dataset']]
toPlot['size'] = meta['lc_m.total_size'].astype(float)
toPlot['sd'] = meta['lc_m.s.data'].astype(float)
toPlot['si'] = meta['lc_m.s.index'].astype(float)
toPlot['td'] = meta['lc_m.t.data'].astype(float)
toPlot['ti'] = meta['lc_m.t.index'].astype(float)
# st.dataframe(toPlot)
# st.write(toPlot['lc_m.total_size']
# selection = alt.selection_multi(fields=['db'], bind='legend')
# alt.themes.enable('ggplot2')
alt.themes.enable('default')
# Stacked bars: per-db storage split into sd/si/td/ti categories.
bar = alt.Chart(toPlot).transform_fold(
    ['sd', 'si', 'td', 'ti'],
    as_=['category', 'size']
).mark_bar().encode(
    x=alt.X("db", title=None),
    y=alt.Y("sum(size):Q",
            stack='zero',
            axis=alt.Axis(format="~s"),
            title=None),
    color=alt.Color('category:N'),
    # tooltip=["db", "size", "lc_m.s.data", "lc_m.s.index", "lc_m.t.data", "lc_m.t.index"],
    # column="dataset",
    # opacity=alt.condition(selection, alt.value(1), alt.value(0.1))
).properties(
    width=250,
    height=460
)  # .add_selection(selection)
# Text layer: per-(dataset, db) total size in GB printed above each bar.
text = bar.mark_text(
    baseline='bottom',
).encode(
    text=alt.Text('label:N'),  # , format=',.2r')
    color=alt.value("#000")
).transform_joinaggregate(
    asize="sum(size):Q",
    groupby=["dataset", "db"]
).transform_calculate(
    label="format(datum.asize/1024/1024/1024, '.1f')"
)
# One facet per dataset, independent y scales.
c = alt.layer(bar, text).facet(
    facet='dataset',
    columns=3
).resolve_scale(
    y='independent'
)
st.altair_chart(c)
# Execution stops here: everything below this line is dead code.
st.stop()

def spaceIncrement():  # space growth-rate analysis
    """Plot temporal-storage growth across dataset scale factors (plotnine).

    NOTE(review): dead code — st.stop() above prevents execution, and the
    calls below pass dataSize= to spaceDetailData(), which does not match
    its current signature spaceDetailData(meta); calling this function
    as-is would raise TypeError. Apparently written against an older
    spaceDetailData — confirm before reviving.
    """
    raw = {
        '0.01': spaceDetailData(dataSize='.01'),
        '0.1': spaceDetailData(dataSize='.1'),
        # '0.5': spaceDetailData(dataSize='.5'),
        # '0.9': spaceDetailData(dataSize='.9'),
        '1': spaceDetailData(dataSize='.all')
    }
    # Flatten the per-scale dict into one DataFrame keyed by 'size'.
    df = dictKey2PdCol(raw, 'size')
    # print(df)
    # Only the temporal (tp) components are analyzed for growth.
    tpData = df.loc[df['category'].isin(['tData', 'tIndex'])]
    # print(tpData)
    grp = tpData.groupby(['sys', 'dataset', 'size']).agg('sum')
    toPrint = pd.DataFrame(grp.reset_index())
    # print(toPrint)
    # toPrint['size'] = toPrint['size'].astype(float)
    # tpData = toPrint.loc[:,['sys', 'dataset', 'value', 'tSize']]
    # tpData
    # toPrint1 = toPrint.pivot(index=['sys', 'dataset'], columns='size', values='value')
    # toPrint2 = toPrint1.sort_values(by=['dataset', 'sys'])
    # toPrint2['inc_rate'] = toPrint2['.1'] / toPrint2['.01']
    # print(toPrint2)
    # Bar chart of mean size per system, faceted by (size, dataset).
    dodge_text = G.position_dodge(width=.9)
    g = G.ggplot(toPrint, G.aes('sys', 'value', fill='sys'))
    g = g + G.stat_summary(fun_y=np.mean, geom='bar', position=dodge_text, width=.9)
    # g += G.geom_bar()
    # g += G.geom_line()
    # g += G.geom_point()
    # + G.scale_x_datetime(breaks=date_breaks('1 month'))
    # g = g + G.ggtitle('Space cost of Comparable systems (Bytes)')
    g += G.scale_y_continuous(labels=lambda l: ["{:.1f} GB".format(v / 1024 / 1024 / 1024) for v in l])
    g += G.ylab('Database Size')
    # g += G.xlab('Systems')

    # g = G.ggplot(spaces, G.aes(x='type', y='exeTime', fill='sys'))
    # g = g + G.stat_summary(fun_y=np.mean, geom='bar', position=dodge_text, width=.9)
    # # g = g + stat_summary(fun_y=np.mean, geom=geom_text(, size=8))
    # g = g + G.geom_boxplot(width=.3, outlier_size=1, outlier_shape='.', position=dodge_text)
    # g = g + G.geom_text(G.aes(label=G.after_stat('y')), stat=G.stat_summary(fun_y=np.sum),
    #                     position=dodge_text, format_string='{:.1f}') #, va='bottom'
    # # g = g + geom_point()
    # # g = g + geom_jitter()
    # # g = g + lims(y=-2500)
    # g = g + G.facet_wrap('dataset', scales="free_y")
    g = g + G.facet_wrap('size + dataset', scales="free_y", ncol=3)
    # g = g + G.facet_grid('size ~ dataset + category', scales="free")
    # g = g + G.facet_grid('size ~ dataset', scales="free_y")  # row ~ column
    # g = g + G.ylim(0, 650)
    # # g = g + xlim(-1, 3700)
    # # g = g + G.scale_y_log10()
    g = g + G.theme(
        # axis_text_x=element_text(rotation=45, hjust=1),
        # legend_position='top',
        subplots_adjust={'wspace': 0.21},
        figure_size=(15, 6))
    # g = g + G.ggtitle('RW mix 0.1')
    return g.draw()


# st.pyplot(spaceIncrement())
