#
# Copyright 2017 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import warnings
import matplotlib.gridspec as gridspec
import matplotlib.pyplot as plt
import pandas as pd

from . import plotting
from . import performance as perf
from . import utils
import io
import base64
from jinja2 import Template
import numpy as np
from contextlib import redirect_stdout
from io import StringIO
import duckdb
from pathlib import Path

class GridFigure(object):
    """Helper that hands out matplotlib subplots from a fixed grid.

    Maintains an internal (row, column) cursor so callers can request
    "the next full-width row" or "the next cell" without tracking grid
    positions themselves.
    """

    def __init__(self, rows, cols):
        self.rows = rows
        self.cols = cols
        self.fig = plt.figure(figsize=(14, rows * 7))
        self.gs = gridspec.GridSpec(rows, cols, wspace=0.4, hspace=0.3)
        self.curr_row = 0
        self.curr_col = 0

    def next_row(self):
        """Return an axes spanning the full width of the next free row."""
        # A partially-filled row cannot host a full-width axes; skip it.
        if self.curr_col != 0:
            self.curr_col = 0
            self.curr_row += 1
        axes = plt.subplot(self.gs[self.curr_row, :])
        self.curr_row += 1
        return axes

    def next_cell(self):
        """Return an axes for the next free cell, wrapping to a new row."""
        if self.curr_col >= self.cols:
            self.curr_col = 0
            self.curr_row += 1
        axes = plt.subplot(self.gs[self.curr_row, self.curr_col])
        self.curr_col += 1
        return axes

    def close(self):
        """Close the underlying figure and drop references to it."""
        plt.close(self.fig)
        self.fig = None
        self.gs = None


@plotting.customize
def create_summary_tear_sheet(factor_data, long_short=True, group_neutral=False):
    """
    Build a compact tear sheet combining returns, information (IC) and
    turnover statistics for a single factor.

    Parameters
    ----------
    factor_data : pd.DataFrame - MultiIndex
        A MultiIndex DataFrame indexed by date (level 0) and asset (level 1),
        containing the values for a single alpha factor, forward returns for
        each period, the factor quantile/bin that factor value belongs to, and
        (optionally) the group the asset belongs to.
        - See full explanation in utils.get_clean_factor_and_forward_returns
    long_short : bool
        Should this computation happen on a long short portfolio? if so, then
        mean quantile returns will be demeaned across the factor universe.
    group_neutral : bool
        Should this computation happen on a group neutral portfolio? if so,
        returns demeaning will occur on the group level.
    """

    # --- Returns analysis ---
    quant_ret_mean, _ = perf.mean_return_by_quantile(
        factor_data,
        by_group=False,
        demeaned=long_short,
        group_adjust=group_neutral,
    )

    # Convert multi-period returns to one-period rates (base = first column).
    quant_rateret_mean = quant_ret_mean.apply(
        utils.rate_of_return, axis=0, base_period=quant_ret_mean.columns[0]
    )

    quant_ret_daily, quant_std_daily = perf.mean_return_by_quantile(
        factor_data,
        by_date=True,
        by_group=False,
        demeaned=long_short,
        group_adjust=group_neutral,
    )

    quant_rateret_daily = quant_ret_daily.apply(
        utils.rate_of_return,
        axis=0,
        base_period=quant_ret_daily.columns[0],
    )

    # Scale per-date standard deviations to the same base period.
    quant_std_daily_conv = quant_std_daily.apply(
        utils.std_conversion, axis=0, base_period=quant_std_daily.columns[0]
    )

    alpha_beta = perf.factor_alpha_beta(
        factor_data, demeaned=long_short, group_adjust=group_neutral
    )

    spread_mean, _ = perf.compute_mean_returns_spread(
        quant_rateret_daily,
        factor_data["factor_quantile"].max(),
        factor_data["factor_quantile"].min(),
        std_err=quant_std_daily_conv,
    )

    # Translate forward-return column labels ('1D', '5D', ...) into day counts.
    periods = [
        pd.Timedelta(label).days
        for label in utils.get_forward_returns_columns(factor_data.columns)
    ]

    gf = GridFigure(rows=2 + len(periods) * 3, cols=1)

    plotting.plot_quantile_statistics_table(factor_data)

    plotting.plot_returns_table(alpha_beta, quant_rateret_mean, spread_mean)

    plotting.plot_quantile_returns_bar(
        quant_rateret_mean,
        by_group=False,
        ylim_percentiles=None,
        ax=gf.next_row(),
    )

    # --- Information analysis ---
    plotting.plot_information_table(
        perf.factor_information_coefficient(factor_data)
    )

    # --- Turnover analysis ---
    quantiles = factor_data["factor_quantile"]

    quantile_turnover = {}
    for period in periods:
        per_quantile = [
            perf.quantile_turnover(quantiles, quantile, period)
            for quantile in range(1, int(quantiles.max()) + 1)
        ]
        quantile_turnover[period] = pd.concat(per_quantile, axis=1)

    autocorrelation = pd.concat(
        [perf.factor_rank_autocorrelation(factor_data, p) for p in periods],
        axis=1,
    )

    plotting.plot_turnover_table(autocorrelation, quantile_turnover)

    plt.show()
    gf.close()


@plotting.customize
def create_returns_tear_sheet(
    factor_data, long_short=True, group_neutral=False, by_group=False
):
    """
    Creates a tear sheet for returns analysis of a factor.

    Parameters
    ----------
    factor_data : pd.DataFrame - MultiIndex
        A MultiIndex DataFrame indexed by date (level 0) and asset (level 1),
        containing the values for a single alpha factor, forward returns for
        each period, the factor quantile/bin that factor value belongs to,
        and (optionally) the group the asset belongs to.
        - See full explanation in utils.get_clean_factor_and_forward_returns
    long_short : bool
        Should this computation happen on a long short portfolio? if so, then
        mean quantile returns will be demeaned across the factor universe.
        Additionally factor values will be demeaned across the factor universe
        when factor weighting the portfolio for cumulative returns plots
    group_neutral : bool
        Should this computation happen on a group neutral portfolio? if so,
        returns demeaning will occur on the group level.
        Additionally each group will weight the same in cumulative returns
        plots
    by_group : bool
        If True, display graphs separately for each group.
    """

    # Factor-weighted portfolio returns, one column per forward-return period.
    factor_returns = perf.factor_returns(factor_data, long_short, group_neutral)

    # Mean forward returns per factor quantile, aggregated over all dates.
    mean_quant_ret, std_quantile = perf.mean_return_by_quantile(
        factor_data,
        by_group=False,
        demeaned=long_short,
        group_adjust=group_neutral,
    )

    # Convert multi-period returns to one-period rates so different horizons
    # are comparable (base period = first forward-return column).
    mean_quant_rateret = mean_quant_ret.apply(
        utils.rate_of_return, axis=0, base_period=mean_quant_ret.columns[0]
    )

    # Same quantile statistics, but per date — needed for the violin plot,
    # cumulative-return plots and the spread computation below.
    mean_quant_ret_bydate, std_quant_daily = perf.mean_return_by_quantile(
        factor_data,
        by_date=True,
        by_group=False,
        demeaned=long_short,
        group_adjust=group_neutral,
    )

    mean_quant_rateret_bydate = mean_quant_ret_bydate.apply(
        utils.rate_of_return,
        axis=0,
        base_period=mean_quant_ret_bydate.columns[0],
    )

    # Scale the per-date standard deviations to the base period as well.
    compstd_quant_daily = std_quant_daily.apply(
        utils.std_conversion, axis=0, base_period=std_quant_daily.columns[0]
    )

    alpha_beta = perf.factor_alpha_beta(
        factor_data, factor_returns, long_short, group_neutral
    )

    # Mean return spread between the top and bottom factor quantiles.
    mean_ret_spread_quant, std_spread_quant = perf.compute_mean_returns_spread(
        mean_quant_rateret_bydate,
        factor_data["factor_quantile"].max(),
        factor_data["factor_quantile"].min(),
        std_err=compstd_quant_daily,
    )

    # One grid column; 2 fixed rows plus 3 rows per forward-return period.
    fr_cols = len(factor_returns.columns)
    vertical_sections = 2 + fr_cols * 3
    gf = GridFigure(rows=vertical_sections, cols=1)

    plotting.plot_returns_table(alpha_beta, mean_quant_rateret, mean_ret_spread_quant)

    plotting.plot_quantile_returns_bar(
        mean_quant_rateret,
        by_group=False,
        ylim_percentiles=None,
        ax=gf.next_row(),
    )

    plotting.plot_quantile_returns_violin(
        mean_quant_rateret_bydate, ylim_percentiles=(1, 99), ax=gf.next_row()
    )

    # NOTE(review): trading_calendar is assigned (and a warning emitted when
    # 'freq' is missing) but the value is never used afterwards in this
    # function — looks like a leftover; confirm before removing.
    trading_calendar = factor_data.index.levels[0].freq
    if trading_calendar is None:
        trading_calendar = pd.tseries.offsets.BDay()
        warnings.warn(
            "'freq' not set in factor_data index: assuming business day",
            UserWarning,
        )

    # Compute cumulative returns from daily simple returns, if '1D'
    # returns are provided.
    if "1D" in factor_returns:
        title = (
            "Factor Weighted "
            + ("Group Neutral " if group_neutral else "")
            + ("Long/Short " if long_short else "")
            + "Portfolio Cumulative Return (1D Period)"
        )

        plotting.plot_cumulative_returns(
            factor_returns["1D"], period="1D", title=title, ax=gf.next_row()
        )

        plotting.plot_cumulative_returns_by_quantile(
            mean_quant_ret_bydate["1D"], period="1D", ax=gf.next_row()
        )

    # One spread time-series plot per forward-return period.
    ax_mean_quantile_returns_spread_ts = [gf.next_row() for x in range(fr_cols)]
    plotting.plot_mean_quantile_returns_spread_time_series(
        mean_ret_spread_quant,
        std_err=std_spread_quant,
        bandwidth=0.5,
        ax=ax_mean_quantile_returns_spread_ts,
    )

    plt.show()
    gf.close()

    if by_group:
        # Re-run the quantile return analysis broken out by group and draw
        # one bar chart per group on a fresh two-column grid.
        (
            mean_return_quantile_group,
            mean_return_quantile_group_std_err,
        ) = perf.mean_return_by_quantile(
            factor_data,
            by_date=False,
            by_group=True,
            demeaned=long_short,
            group_adjust=group_neutral,
        )

        mean_quant_rateret_group = mean_return_quantile_group.apply(
            utils.rate_of_return,
            axis=0,
            base_period=mean_return_quantile_group.columns[0],
        )

        num_groups = len(
            mean_quant_rateret_group.index.get_level_values("group").unique()
        )

        # One full-width row plus enough two-column rows to fit all groups.
        vertical_sections = 1 + (((num_groups - 1) // 2) + 1)
        gf = GridFigure(rows=vertical_sections, cols=2)

        ax_quantile_returns_bar_by_group = [gf.next_cell() for _ in range(num_groups)]
        plotting.plot_quantile_returns_bar(
            mean_quant_rateret_group,
            by_group=True,
            ylim_percentiles=(5, 95),
            ax=ax_quantile_returns_bar_by_group,
        )
        plt.show()
        gf.close()


@plotting.customize
def create_information_tear_sheet(factor_data, group_neutral=False, by_group=False):
    """
    Build a tear sheet with the information-coefficient (IC) analysis of a
    factor: summary table, IC time series, histogram/Q-Q plots and either a
    monthly heatmap or per-group mean ICs.

    Parameters
    ----------
    factor_data : pd.DataFrame - MultiIndex
        A MultiIndex DataFrame indexed by date (level 0) and asset (level 1),
        containing the values for a single alpha factor, forward returns for
        each period, the factor quantile/bin that factor value belongs to, and
        (optionally) the group the asset belongs to.
        - See full explanation in utils.get_clean_factor_and_forward_returns
    group_neutral : bool
        Demean forward returns by group before computing IC.
    by_group : bool
        If True, display graphs separately for each group.
    """

    ic = perf.factor_information_coefficient(factor_data, group_neutral)

    plotting.plot_information_table(ic)

    # Grid layout: full-width time-series rows, then two-column cells for
    # the histogram/Q-Q pairs (and the heatmaps when not grouping).
    n_periods = len(ic.columns)
    grid_cols = 2
    wide_rows = ((n_periods - 1) // grid_cols) + 1
    gf = GridFigure(rows=n_periods + 3 * wide_rows + 2 * n_periods, cols=grid_cols)

    ts_axes = [gf.next_row() for _ in range(n_periods)]
    plotting.plot_ic_ts(ic, ax=ts_axes)

    # Alternate cells: even indices for histograms, odd for Q-Q plots.
    hist_qq_axes = [gf.next_cell() for _ in range(2 * n_periods)]
    plotting.plot_ic_hist(ic, ax=hist_qq_axes[::2])
    plotting.plot_ic_qq(ic, ax=hist_qq_axes[1::2])

    if by_group:
        group_ic = perf.mean_information_coefficient(
            factor_data, group_adjust=group_neutral, by_group=True
        )
        plotting.plot_ic_by_group(group_ic, ax=gf.next_row())
    else:
        monthly_ic = perf.mean_information_coefficient(
            factor_data,
            group_adjust=group_neutral,
            by_group=False,
            by_time="M",
        )
        heatmap_axes = [gf.next_cell() for _ in range(n_periods)]
        plotting.plot_monthly_ic_heatmap(monthly_ic, ax=heatmap_axes)

    plt.show()
    gf.close()


@plotting.customize
def create_turnover_tear_sheet(factor_data, turnover_periods=None):
    """
    Creates a tear sheet for analyzing the turnover properties of a factor.

    Parameters
    ----------
    factor_data : pd.DataFrame - MultiIndex
        A MultiIndex DataFrame indexed by date (level 0) and asset (level 1),
        containing the values for a single alpha factor, forward returns for
        each period, the factor quantile/bin that factor value belongs to, and
        (optionally) the group the asset belongs to.
        - See full explanation in utils.get_clean_factor_and_forward_returns
    turnover_periods : sequence[string], optional
        Periods to compute turnover analysis on. By default periods in
        'factor_data' are used but custom periods can provided instead. This
        can be useful when periods in 'factor_data' are not multiples of the
        frequency at which factor values are computed i.e. the periods
        are 2h and 4h and the factor is computed daily and so values like
        ['1D', '2D'] could be used instead
    """

    if turnover_periods is None:
        # Derive the periods from the forward-return columns, keeping only
        # those that are an exact multiple of one day.
        input_periods = utils.get_forward_returns_columns(
            factor_data.columns, require_exact_day_multiple=True
        ).to_numpy()
        turnover_periods = utils.timedelta_strings_to_integers(input_periods)
    else:
        # Convert caller-supplied strings like '1D' into integer day counts.
        turnover_periods = utils.timedelta_strings_to_integers(turnover_periods)

    quantile_factor = factor_data["factor_quantile"]

    # For each period: one DataFrame with a turnover column per quantile
    # value actually present in the data.
    quantile_turnover = {
        p: pd.concat(
            [
                perf.quantile_turnover(quantile_factor, q, p)
                for q in quantile_factor.sort_values().unique().tolist()
            ],
            axis=1,
        )
        for p in turnover_periods
    }

    # Factor rank autocorrelation for each period, one column per period.
    autocorrelation = pd.concat(
        [
            perf.factor_rank_autocorrelation(factor_data, period)
            for period in turnover_periods
        ],
        axis=1,
    )

    plotting.plot_turnover_table(autocorrelation, quantile_turnover)

    # Single-column grid sized generously for all turnover/autocorr plots.
    fr_cols = len(turnover_periods)
    columns_wide = 1
    rows_when_wide = ((fr_cols - 1) // 1) + 1
    vertical_sections = fr_cols + 3 * rows_when_wide + 2 * fr_cols
    gf = GridFigure(rows=vertical_sections, cols=columns_wide)

    for period in turnover_periods:
        # Skip periods where turnover could not be computed at all.
        if quantile_turnover[period].isnull().all().all():
            continue
        plotting.plot_top_bottom_quantile_turnover(
            quantile_turnover[period], period=period, ax=gf.next_row()
        )

    for period in autocorrelation:
        # Likewise skip all-NaN autocorrelation series.
        if autocorrelation[period].isnull().all():
            continue
        plotting.plot_factor_rank_auto_correlation(
            autocorrelation[period], period=period, ax=gf.next_row()
        )

    plt.show()
    gf.close()


@plotting.customize
def create_full_tear_sheet(
    factor_data, long_short=True, group_neutral=False, by_group=False
):
    """
    Build the complete tear sheet for a single alpha factor by running the
    quantile statistics table plus the returns, information and turnover
    tear sheets in sequence.

    Parameters
    ----------
    factor_data : pd.DataFrame - MultiIndex
        A MultiIndex DataFrame indexed by date (level 0) and asset (level 1),
        containing the values for a single alpha factor, forward returns for
        each period, the factor quantile/bin that factor value belongs to, and
        (optionally) the group the asset belongs to.
        - See full explanation in utils.get_clean_factor_and_forward_returns
    long_short : bool
        Should this computation happen on a long short portfolio?
        - See tears.create_returns_tear_sheet for details on how this flag
        affects returns analysis
    group_neutral : bool
        Should this computation happen on a group neutral portfolio?
        - See tears.create_returns_tear_sheet for details on how this flag
        affects returns analysis
        - See tears.create_information_tear_sheet for details on how this
        flag affects information analysis
    by_group : bool
        If True, display graphs separately for each group.
    """

    plotting.plot_quantile_statistics_table(factor_data)

    # set_context=False: the outer @plotting.customize call already set the
    # plotting context, so the sub-tear-sheets must not re-apply it.
    create_returns_tear_sheet(
        factor_data,
        long_short=long_short,
        group_neutral=group_neutral,
        by_group=by_group,
        set_context=False,
    )
    create_information_tear_sheet(
        factor_data,
        group_neutral=group_neutral,
        by_group=by_group,
        set_context=False,
    )
    create_turnover_tear_sheet(factor_data, set_context=False)


@plotting.customize
def create_event_returns_tear_sheet(
    factor_data,
    returns,
    avgretplot=(5, 15),
    long_short=True,
    group_neutral=False,
    std_bar=True,
    by_group=False,
):
    """
    Creates a tear sheet to view the average cumulative returns for a
    factor within a window (pre and post event).

    Parameters
    ----------
    factor_data : pd.DataFrame - MultiIndex
        A MultiIndex Series indexed by date (level 0) and asset (level 1),
        containing the values for a single alpha factor, the factor
        quantile/bin that factor value belongs to and (optionally) the group
        the asset belongs to.
        - See full explanation in utils.get_clean_factor_and_forward_returns
    returns : pd.DataFrame
        A DataFrame indexed by date with assets in the columns containing daily
        returns.
        - See full explanation in utils.get_clean_factor_and_forward_returns
    avgretplot: tuple (int, int) - (before, after)
        If not None, plot quantile average cumulative returns
    long_short : bool
        Should this computation happen on a long short portfolio? if so then
        factor returns will be demeaned across the factor universe
    group_neutral : bool
        Should this computation happen on a group neutral portfolio? if so,
        returns demeaning will occur on the group level.
    std_bar : boolean, optional
        Show plots with standard deviation bars, one for each quantile
    by_group : bool
        If True, display graphs separately for each group.
    """

    # Window of periods around each event to accumulate returns over.
    before, after = avgretplot

    avg_cumulative_returns = perf.average_cumulative_return_by_quantile(
        factor_data,
        returns,
        periods_before=before,
        periods_after=after,
        demeaned=long_short,
        group_adjust=group_neutral,
    )

    num_quantiles = int(factor_data["factor_quantile"].max())

    # One full-width row for the combined plot, plus (optionally) enough
    # two-column rows to show one std-bar plot per quantile.
    vertical_sections = 1
    if std_bar:
        vertical_sections += ((num_quantiles - 1) // 2) + 1
    cols = 2 if num_quantiles != 1 else 1
    gf = GridFigure(rows=vertical_sections, cols=cols)
    plotting.plot_quantile_average_cumulative_return(
        avg_cumulative_returns,
        by_quantile=False,
        std_bar=False,
        ax=gf.next_row(),
    )
    if std_bar:
        ax_avg_cumulative_returns_by_q = [gf.next_cell() for _ in range(num_quantiles)]
        plotting.plot_quantile_average_cumulative_return(
            avg_cumulative_returns,
            by_quantile=True,
            std_bar=True,
            ax=ax_avg_cumulative_returns_by_q,
        )

    plt.show()
    gf.close()

    if by_group:
        # Repeat the analysis per group: one plot per group on a 2-col grid.
        groups = factor_data["group"].unique()
        num_groups = len(groups)
        vertical_sections = ((num_groups - 1) // 2) + 1
        gf = GridFigure(rows=vertical_sections, cols=2)

        avg_cumret_by_group = perf.average_cumulative_return_by_quantile(
            factor_data,
            returns,
            periods_before=before,
            periods_after=after,
            demeaned=long_short,
            group_adjust=group_neutral,
            by_group=True,
        )

        for group, avg_cumret in avg_cumret_by_group.groupby(level="group"):
            # Drop the 'group' level so the plotting helper sees the same
            # index shape as in the non-grouped case.
            avg_cumret.index = avg_cumret.index.droplevel("group")
            plotting.plot_quantile_average_cumulative_return(
                avg_cumret,
                by_quantile=False,
                std_bar=False,
                title=group,
                ax=gf.next_cell(),
            )

        plt.show()
        gf.close()


@plotting.customize
def create_event_study_tear_sheet(
    factor_data, returns, avgretplot=(5, 15), rate_of_ret=True, n_bars=50
):
    """
    Creates an event study tear sheet for analysis of a specific event.

    Parameters
    ----------
    factor_data : pd.DataFrame - MultiIndex
        A MultiIndex DataFrame indexed by date (level 0) and asset (level 1),
        containing the values for a single event, forward returns for each
        period, the factor quantile/bin that factor value belongs to, and
        (optionally) the group the asset belongs to.
    returns : pd.DataFrame, required only if 'avgretplot' is provided
        A DataFrame indexed by date with assets in the columns containing daily
        returns.
        - See full explanation in utils.get_clean_factor_and_forward_returns
    avgretplot: tuple (int, int) - (before, after), optional
        If not None, plot event style average cumulative returns within a
        window (pre and post event).
    rate_of_ret : bool, optional
        Display rate of return instead of simple return in 'Mean Period Wise
        Return By Factor Quantile' and 'Period Wise Return By Factor Quantile'
        plots
    n_bars : int, optional
        Number of bars in event distribution plot
    """

    # Event studies are not long/short analyses: never demean returns here.
    long_short = False

    plotting.plot_quantile_statistics_table(factor_data)

    # Distribution of event occurrences over time.
    gf = GridFigure(rows=1, cols=1)
    plotting.plot_events_distribution(
        events=factor_data["factor"], num_bars=n_bars, ax=gf.next_row()
    )
    plt.show()
    gf.close()

    if returns is not None and avgretplot is not None:

        create_event_returns_tear_sheet(
            factor_data=factor_data,
            returns=returns,
            avgretplot=avgretplot,
            long_short=long_short,
            group_neutral=False,
            std_bar=True,
            by_group=False,
        )

    # Equal-weighted (not factor-weighted) portfolio returns for the event.
    factor_returns = perf.factor_returns(factor_data, demeaned=False, equal_weight=True)

    mean_quant_ret, std_quantile = perf.mean_return_by_quantile(
        factor_data, by_group=False, demeaned=long_short
    )
    if rate_of_ret:
        # Convert multi-period returns to one-period rates for display.
        mean_quant_ret = mean_quant_ret.apply(
            utils.rate_of_return, axis=0, base_period=mean_quant_ret.columns[0]
        )

    mean_quant_ret_bydate, std_quant_daily = perf.mean_return_by_quantile(
        factor_data, by_date=True, by_group=False, demeaned=long_short
    )
    if rate_of_ret:
        mean_quant_ret_bydate = mean_quant_ret_bydate.apply(
            utils.rate_of_return,
            axis=0,
            base_period=mean_quant_ret_bydate.columns[0],
        )

    fr_cols = len(factor_returns.columns)
    vertical_sections = 2 + fr_cols * 1
    gf = GridFigure(rows=vertical_sections + 1, cols=1)

    plotting.plot_quantile_returns_bar(
        mean_quant_ret, by_group=False, ylim_percentiles=None, ax=gf.next_row()
    )

    plotting.plot_quantile_returns_violin(
        mean_quant_ret_bydate, ylim_percentiles=(1, 99), ax=gf.next_row()
    )

    # NOTE(review): trading_calendar is assigned (and a warning emitted when
    # 'freq' is missing) but the value is never used afterwards — appears to
    # be dead code inherited from create_returns_tear_sheet; confirm.
    trading_calendar = factor_data.index.levels[0].freq
    if trading_calendar is None:
        trading_calendar = pd.tseries.offsets.BDay()
        warnings.warn(
            "'freq' not set in factor_data index: assuming business day",
            UserWarning,
        )

    plt.show()
    gf.close()


def figure_to_base64(fig):
    """Render a matplotlib figure to a base64-encoded PNG string."""
    with io.BytesIO() as buffer:
        fig.savefig(buffer, format='png', bbox_inches='tight')
        png_bytes = buffer.getvalue()
    return base64.b64encode(png_bytes).decode('utf-8')

def table_to_html(table, name=None, fmt=None):
    """Convert a pandas DataFrame (or Series) to an HTML table string.

    Parameters
    ----------
    table : pd.DataFrame or pd.Series
        Data to render. A Series is converted to a one-column DataFrame.
    name : str, optional
        If given, rendered as an <h4> heading above the table.
    fmt : str or dict, optional
        Either a format string (e.g. '{:.2%}') applied to every float
        column, or a dict mapping column -> formatter callable passed
        straight through to DataFrame.to_html. Defaults to '{:.4f}'.

    Returns
    -------
    str
        HTML markup for the (optionally titled) table.
    """
    if fmt is None:
        fmt = '{:.4f}'

    if isinstance(table, pd.Series):
        table = pd.DataFrame(table)

    if isinstance(fmt, str):
        # Build a per-column formatter for every float-dtype column.
        # BUG FIX: the previous check `isinstance(table[col].dtype, (np.floating, float))`
        # was always False (column dtypes are np.dtype objects, not np.floating
        # instances), so a caller-supplied fmt string was silently ignored.
        # np.issubdtype correctly detects floating-point columns.
        formatters = {
            col: fmt.format
            for col in table.columns
            if np.issubdtype(table[col].dtype, np.floating)
        }
    else:
        formatters = fmt

    # float_format is the fallback for float cells not covered by formatters.
    html = table.to_html(
        float_format=lambda x: '{:.4f}'.format(x)
        if isinstance(x, (float, np.float64))
        else x,
        formatters=formatters,
        classes='dataframe',
        border=1,
        justify='center',
    )

    if name is not None:
        html = f"<h4>{name}</h4>\n" + html

    return html

@plotting.customize
def create_full_tear_sheet_html(
    factor_data, 
    long_short=True, 
    group_neutral=False, 
    by_group=False,
    output_file='factor_analysis.html'
):
    """
    Run the full factor analysis and write the result to an HTML report.

    Temporarily monkey-patches ``plt.show`` and ``utils.print_table`` so the
    figures and tables produced by the standard tear-sheet functions are
    captured (as base64 PNGs and HTML tables) instead of being displayed,
    then renders everything into a single self-contained HTML file.

    Parameters
    ----------
    factor_data : pd.DataFrame - MultiIndex
        Factor data (see utils.get_clean_factor_and_forward_returns).
    long_short : bool
        Whether to analyze as a long/short portfolio.
    group_neutral : bool
        Whether to apply group-level neutralization.
    by_group : bool
        Whether to display charts separately per group.
    output_file : str
        Path of the HTML file to write.
    """
    figures = []
    tables = []
    
    # Keep a reference to the real plt.show so it can be restored later.
    original_show = plt.show
    
    def custom_show(*args, **kwargs):
        """Replacement for plt.show that captures the current figure as a
        base64-encoded PNG instead of displaying it."""
        fig = plt.gcf()
        buf = io.BytesIO()
        fig.savefig(buf, format='png', bbox_inches='tight', dpi=150)
        buf.seek(0)
        img_str = base64.b64encode(buf.getvalue()).decode('utf-8')
        figures.append(img_str)
        plt.close(fig)
    
    def save_table(table, name=None, fmt=None):
        # Replacement for utils.print_table that captures tables as HTML.
        tables.append(table_to_html(table, name, fmt))
    
    # Install the capturing replacements.
    plt.show = custom_show
    original_print_table = utils.print_table
    utils.print_table = save_table

    try:
        # Swallow any stray stdout produced by the tear-sheet functions.
        output = StringIO()
        with redirect_stdout(output):
            # 1. Basic quantile statistics
            plotting.plot_quantile_statistics_table(factor_data)
            
            # 2. Returns analysis
            create_returns_tear_sheet(
                factor_data, 
                long_short, 
                group_neutral, 
                by_group, 
                set_context=False
            )
            
            # 3. Information (IC) analysis
            create_information_tear_sheet(
                factor_data,
                group_neutral,
                by_group,
                set_context=False
            )
            
            # 4. Turnover analysis
            create_turnover_tear_sheet(
                factor_data,
                set_context=False
            )
            
    finally:
        # Always restore the patched functions, even on error.
        plt.show = original_show
        utils.print_table = original_print_table
        # Close any figures still left open.
        plt.close('all')

    # HTML report template (title/headings intentionally in Chinese).
    template = Template("""
    <!DOCTYPE html>
    <html>
    <head>
        <title>因子分析报告</title>
        <style>
            body {
                font-family: Arial, sans-serif;
                margin: 20px;
                background-color: #f5f5f5;
            }
            .container {
                max-width: 1200px;
                margin: 0 auto;
                background-color: white;
                padding: 20px;
                box-shadow: 0 0 10px rgba(0,0,0,0.1);
            }
            .figure { 
                margin: 20px 0; 
                text-align: center; 
            }
            img {
                max-width: 100%;
                height: auto;
                margin: 20px 0;
                border: 1px solid #ddd;
                border-radius: 5px;
            }
            .dataframe {
                border-collapse: collapse;
                margin: 20px 0;
                width: 100%;
            }
            .dataframe th, .dataframe td {
                padding: 8px;
                text-align: center;
                border: 1px solid #ddd;
            }
            .dataframe th {
                background-color: #f5f5f5;
            }
            .dataframe tr:nth-child(even) {
                background-color: #f9f9f9;
            }
            h1 {
                color: #333;
                border-bottom: 2px solid #eee;
                padding-bottom: 10px;
            }
            h4 {
                color: #333;
                margin-top: 30px;
            }
        </style>
    </head>
    <body>
        <div class="container">
            <h1>因子分析报告</h1>
            
            {% for table in tables %}
            <div class="table">
                {{ table | safe }}
            </div>
            {% endfor %}
            
            {% for figure in figures %}
            <div class="figure">
                <img src="data:image/png;base64,{{ figure }}" />
            </div>
            {% endfor %}
        </div>
    </body>
    </html>
    """)
    
    # Render and write the final HTML report.
    html_content = template.render(figures=figures, tables=tables)
    with open(output_file, 'w', encoding='utf-8') as f:
        f.write(html_content)

    
def save_factor_analysis_to_db(
    factor_name,
    factor_expr,
    returns_table, 
    ic_summary_table, 
    conn,
    analysis_date=None
):
    """
    Persist one factor's analysis results to DuckDB: return metrics, IC
    metrics, and derived positive/negative IC scores.

    Parameters
    ----------
    factor_name : str
        Name of the factor.
    factor_expr : str
        The factor's formula/expression.
    returns_table : pd.DataFrame
        Returns summary table; must contain the Chinese-labelled rows and a
        '10D' column referenced below (produced by the customized
        plotting.plot_returns_table with return_df=True).
    ic_summary_table : pd.DataFrame
        IC summary table indexed by period ('10D', ...) with the
        Chinese-labelled columns referenced below.
    conn : duckdb connection
        Open DuckDB connection to write into.
    analysis_date : datetime, optional
        Timestamp for the record; defaults to now.
    """
    import datetime
    import numpy as np
    
    if analysis_date is None:
        analysis_date = datetime.datetime.now()
        
    # Create the results table if it does not exist yet (schema includes
    # the pos_score / neg_score columns).
    conn.execute("""
        CREATE TABLE IF NOT EXISTS factor_analysis_results (
            factor_name VARCHAR,
            factor_expr VARCHAR,
            analysis_date TIMESTAMP,
            
            -- 收益分析指标
            top_quantile_return DOUBLE,
            bottom_quantile_return DOUBLE,
            quantile_spread DOUBLE,
            alpha DOUBLE,
            beta DOUBLE,
            
            -- IC分析指标
            ic_mean DOUBLE,
            ic_std DOUBLE,
            ic_ir DOUBLE,
            ic_t_stat DOUBLE,
            ic_p_value DOUBLE,
            ic_skew DOUBLE,
            ic_kurtosis DOUBLE,
            
            -- IC得分指标
            pos_score DOUBLE,
            neg_score DOUBLE
        )
    """)
    
    # Split the per-period mean ICs into positive/negative buckets.
    periods = ic_summary_table.index.tolist()
    weights = np.array([1/len(periods)] * len(periods))  # simple equal weights
    
    pos_IC_list = []
    neg_IC_list = []
    pos_IR_list = []
    neg_IR_list = []
    all_IC = []
    
    for period in periods:
        ic_mean = ic_summary_table.loc[period, 'IC均值']
        ic_ir = ic_summary_table.loc[period, '风险调整 IC']
        all_IC.append(ic_mean)
        
        if ic_mean > 0:
            pos_IC_list.append(ic_mean)
            pos_IR_list.append(ic_ir)
        else:
            neg_IC_list.append(ic_mean)
            neg_IR_list.append(ic_ir)
    
    # Weighted sums over each bucket.
    # NOTE(review): weights[:len(...)] slices the 1/N equal weights, so these
    # are sums of ic/N over the bucket, NOT a normalized mean over the bucket
    # — confirm this is the intended scoring.
    weighted_pos_IC = np.sum(np.array(pos_IC_list) * np.array(weights[:len(pos_IC_list)])) if pos_IC_list else 0
    weighted_neg_IC = np.sum(np.array(neg_IC_list) * np.array(weights[:len(neg_IC_list)])) if neg_IC_list else 0
    
    weighted_pos_IR = np.sum(np.array(pos_IR_list) * np.array(weights[:len(pos_IR_list)])) if pos_IR_list else 0
    weighted_neg_IR = np.sum(np.array(neg_IR_list) * np.array(weights[:len(neg_IR_list)])) if neg_IR_list else 0
    
    # Final scores: weighted IC times weighted IR for each sign bucket.
    pos_score = weighted_pos_IC * weighted_pos_IR
    neg_score = weighted_neg_IC * weighted_neg_IR
    
    top_quantile_return=returns_table.loc['周期性回报顶部分位数的平均值（基点）']['10D']
    bottom_quantile_return=returns_table.loc['周期性回报底部分位数的平均值（基点）']['10D']
    # Build a single result row. All return/IC metrics are taken from the
    # '10D' period column only.
    result_dict = {
        'factor_name': factor_name,
        'factor_expr': factor_expr,
        'analysis_date': analysis_date,
        
        # Return metrics — 10D column
        
        'top_quantile_return': top_quantile_return,
        'bottom_quantile_return':bottom_quantile_return, 
        'quantile_spread': top_quantile_return - bottom_quantile_return,
        'alpha': returns_table.loc['Ann. alpha[年化alpha]']['10D'],
        'beta': returns_table.loc['beta[衡量波动性]']['10D'],
        
        # IC metrics — 10D row
        'ic_mean': ic_summary_table.loc['10D', 'IC均值'],
        'ic_std': ic_summary_table.loc['10D', 'IC标准差'],
        'ic_ir': ic_summary_table.loc['10D', '风险调整 IC'],
        'ic_t_stat': ic_summary_table.loc['10D', 't统计量(IC)[越大越好]'],
        'ic_p_value': ic_summary_table.loc['10D', 'p值(IC)[越小越好]'],
        'ic_skew': ic_summary_table.loc['10D', 'IC偏度'],
        'ic_kurtosis': ic_summary_table.loc['10D', 'IC峰度'],
        
        # IC score metrics
        'pos_score': pos_score,
        'neg_score': neg_score
    }
    
    # Insert via DuckDB's replacement scan: the SQL references the local
    # Python DataFrame `result_df` by name. Dict insertion order must match
    # the column order of the CREATE TABLE above.
    result_df = pd.DataFrame([result_dict])
    conn.execute("INSERT INTO factor_analysis_results SELECT * FROM result_df")

def analyze_factor_and_save(
    factor_data,
    factor_name,
    factor_expr,
    long_short=True,
    group_neutral=False,
    by_group=False,
    analysis_date=None
):
    """
    Run the factor analysis pipeline and persist the results to the local
    DuckDB database (~/alphalens_data/factor_stats.duckdb).

    Parameters
    ----------
    factor_data : pd.DataFrame - MultiIndex
        Factor data containing factor values and forward returns
        (see utils.get_clean_factor_and_forward_returns).
    factor_name : str
        Name of the factor.
    factor_expr : str
        The factor's formula/expression.
    long_short : bool
        Whether to analyze as a long/short portfolio.
    group_neutral : bool
        Whether to apply group-level neutralization.
    by_group : bool
        Whether to break results out by group.
        NOTE(review): currently unused in this function — confirm intent.
    analysis_date : datetime, optional
        Analysis timestamp; defaults to now.
    """
    import datetime
    
    if analysis_date is None:
        analysis_date = datetime.datetime.now()

    # Ensure ~/alphalens_data exists for the database file.
    data_dir = Path.home() / "alphalens_data"
    data_dir.mkdir(parents=True, exist_ok=True)
    
    # Database file path.
    db_path = data_dir / "factor_stats.duckdb"

    # Compute the analysis inputs (mirrors create_returns_tear_sheet).
    factor_returns = perf.factor_returns(factor_data, long_short, group_neutral)
    
    mean_quant_ret, std_quantile = perf.mean_return_by_quantile(
        factor_data,
        by_group=False,
        demeaned=long_short,
        group_adjust=group_neutral
    )

    mean_quant_rateret = mean_quant_ret.apply(
        utils.rate_of_return,
        axis=0,
        base_period=mean_quant_ret.columns[0]
    )

    mean_quant_ret_bydate, std_quant_daily = perf.mean_return_by_quantile(
        factor_data,
        by_date=True,
        by_group=False,
        demeaned=long_short,
        group_adjust=group_neutral
    )

    alpha_beta = perf.factor_alpha_beta(
        factor_data,
        factor_returns,
        long_short,
        group_neutral
    )

    mean_ret_spread_quant, std_spread_quant = perf.compute_mean_returns_spread(
        mean_quant_ret_bydate,
        factor_data["factor_quantile"].max(),
        factor_data["factor_quantile"].min()
    )

    # Information coefficient per period.
    ic = perf.factor_information_coefficient(factor_data, group_neutral)

    # Fetch the summary tables as DataFrames.
    # NOTE(review): return_df=True is not a stock alphalens argument —
    # presumably the local plotting module was customized to support it.
    returns_table = plotting.plot_returns_table(
        alpha_beta,
        mean_quant_rateret,
        mean_ret_spread_quant,
        return_df=True
    )

    ic_summary_table = plotting.plot_information_table(ic, return_df=True)

    # Write the results, always closing the connection.
    con = duckdb.connect(str(db_path))
    
    try:
        save_factor_analysis_to_db(
            factor_name,
            factor_expr,
            returns_table,
            ic_summary_table,
            con,
            analysis_date
        )
    finally:
        con.close()
