import logging
from typing import List, Dict, Optional
from datetime import datetime, timedelta
import os
import io
import base64
import matplotlib
# Set the non-interactive Agg backend before importing pyplot (headless rendering)
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from matplotlib.font_manager import FontProperties
import pandas as pd
import seaborn as sns
from jinja2 import Environment, FileSystemLoader
import pdfkit
import json
import time
from io import BytesIO

from app import db
from app.models import Article, SocialMediaPost, Keyword, SentimentHistory
from app.data_processing.trend_analyzer import TrendAnalyzer
from app.data_processing.hotspot_detector import HotspotDetector
from app.data_processing.location_analyzer import LocationAnalyzer

# Declare the module's public API up front
__all__ = [
    'ReportGenerator',
    'generate_daily_report',
    'generate_weekly_report',
    'generate_monthly_report',
    'generate_custom_report'
]

def generate_daily_report(date=None):
    """Build a placeholder daily report for the given date.

    Args:
        date: Report date; defaults to the current time when omitted.

    Returns:
        A dict with the report date and a demo summary section.
    """
    report_date = datetime.now() if date is None else date

    top_topics = [
        {'topic': '话题1', 'mentions': 100},
        {'topic': '话题2', 'mentions': 80},
        {'topic': '话题3', 'mentions': 60},
    ]

    return {
        'date': report_date.strftime('%Y-%m-%d'),
        'summary': {
            'total_mentions': 1000,
            'sentiment_distribution': {
                'positive': 45,
                'neutral': 30,
                'negative': 25,
            },
            'top_topics': top_topics,
        },
    }

def generate_weekly_report(start_date=None):
    """Build a placeholder weekly report starting at *start_date*.

    Args:
        start_date: First day of the week; defaults to seven days ago.

    Returns:
        A dict with the covered period and a demo summary section.
    """
    if start_date is None:
        start_date = datetime.now() - timedelta(days=7)

    week_end = start_date + timedelta(days=6)
    period = f"{start_date.strftime('%Y-%m-%d')} to {week_end.strftime('%Y-%m-%d')}"

    influencers = [
        {'name': '用户1', 'impact_score': 90},
        {'name': '用户2', 'impact_score': 85},
        {'name': '用户3', 'impact_score': 80},
    ]

    return {
        'period': period,
        'summary': {
            'total_mentions': 5000,
            'trend': 'increasing',
            'top_influencers': influencers,
        },
    }

def generate_monthly_report(year, month):
    """Build a placeholder monthly report for *year*/*month*.

    Args:
        year: Report year.
        month: Report month (1-12).

    Returns:
        A dict with the covered period and demo key events.
    """
    key_events = [
        {'date': '2024-03-01', 'event': '事件1', 'impact': 'high'},
        {'date': '2024-03-15', 'event': '事件2', 'impact': 'medium'},
        {'date': '2024-03-30', 'event': '事件3', 'impact': 'low'},
    ]

    return {
        'period': f"{year}年{month}月",
        'summary': {
            'total_mentions': 20000,
            'key_events': key_events,
        },
    }

def generate_custom_report(start_date, end_date, report_type='custom'):
    """Build a placeholder report covering an arbitrary date range.

    Args:
        start_date: First day of the range.
        end_date: Last day of the range.
        report_type: Free-form label stored on the report; defaults to 'custom'.

    Returns:
        A dict with the period, type, and a demo summary section.
    """
    span = f"{start_date.strftime('%Y-%m-%d')} to {end_date.strftime('%Y-%m-%d')}"

    return {
        'period': span,
        'type': report_type,
        'summary': {
            'total_mentions': 15000,
            'sentiment_trend': 'stable',
            'key_findings': ['发现1', '发现2', '发现3'],
        },
    }

class ReportGenerator:
    """Public-opinion analysis report generator.

    Aggregates statistics from the database models and the trend / hotspot /
    location analyzers into report payloads, renders charts via matplotlib,
    and exports reports as HTML, PDF or JSON files.
    """

    def __init__(self):
        self.logger = logging.getLogger(__name__)
        self.trend_analyzer = TrendAnalyzer()
        self.hotspot_detector = HotspotDetector()
        self.location_analyzer = LocationAnalyzer()

        # Locate the bundled Chinese font (SimHei) used for chart labels.
        self.base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
        self.font_path = os.path.join(self.base_dir, 'static', 'fonts', 'SimHei.ttf')

        # Configure matplotlib for Chinese glyphs and correct minus signs.
        plt.rcParams['font.sans-serif'] = ['SimHei']
        plt.rcParams['axes.unicode_minus'] = False

        if os.path.exists(self.font_path):
            self.font = FontProperties(fname=self.font_path)
            plt.rcParams['font.family'] = self.font.get_name()
            self.logger.info(f"已加载中文字体: {self.font_path}")
        else:
            self.logger.warning(f"中文字体文件不存在: {self.font_path}")
            self.font = None

        self.logger.info("报告生成器初始化完成")

        # Raw data supplied via load_data(); None until set by the caller.
        self.data = None

    def load_data(self, data):
        """Store externally supplied data for later report generation."""
        self.data = data

    def generate_sentiment_chart(self):
        """Render a sentiment-distribution pie chart.

        Returns:
            The PNG image encoded as a base64 string.
        """
        # Placeholder data; real figures should come from loaded data.
        sentiment_data = {
            'positive': 45,
            'neutral': 30,
            'negative': 25
        }

        fig = plt.figure(figsize=(8, 6))
        try:
            plt.pie(sentiment_data.values(), labels=sentiment_data.keys(), autopct='%1.1f%%')
            plt.title('情感分布')

            # Serialize the figure to an in-memory PNG.
            buffer = BytesIO()
            plt.savefig(buffer, format='png')
            buffer.seek(0)
            image_png = buffer.getvalue()
            buffer.close()
        finally:
            # Close the figure so repeated calls do not leak matplotlib state.
            plt.close(fig)

        return base64.b64encode(image_png).decode()

    def generate_trend_chart(self, start_date, end_date):
        """Render a volume trend line chart for the given date range.

        Args:
            start_date: First day of the range.
            end_date: Last day of the range.

        Returns:
            The PNG image encoded as a base64 string.
        """
        # Placeholder data: one point per day with linearly growing volume.
        dates = pd.date_range(start=start_date, end=end_date)
        trend_data = {
            'date': dates,
            'volume': [100 + i * 10 for i in range(len(dates))]
        }
        df = pd.DataFrame(trend_data)

        fig = plt.figure(figsize=(10, 6))
        try:
            plt.plot(df['date'], df['volume'])
            plt.title('舆情趋势')
            plt.xlabel('日期')
            plt.ylabel('数量')

            buffer = BytesIO()
            plt.savefig(buffer, format='png')
            buffer.seek(0)
            image_png = buffer.getvalue()
            buffer.close()
        finally:
            # Close the figure so repeated calls do not leak matplotlib state.
            plt.close(fig)

        return base64.b64encode(image_png).decode()

    def generate_daily_report(self, days: int = 1) -> Dict:
        """
        Generate the daily public-opinion analysis report.

        Args:
            days: Number of days the report covers; defaults to 1 (today only).

        Returns:
            Report data dict, or ``{'error': ...}`` on failure.
        """
        start_time = time.time()
        self.logger.info(f"开始生成每日报告 (days={days})...")

        try:
            # Time range covered by the report.
            end_date = datetime.now()
            start_date = end_date - timedelta(days=days)

            # Report title: a single-day report uses the short form.
            if days == 1:
                title = f"每日舆情分析报告 - {end_date.strftime('%Y年%m月%d日')}"
            else:
                title = f"舆情分析报告 - {start_date.strftime('%Y年%m月%d日')} 至 {end_date.strftime('%Y年%m月%d日')}"

            self.logger.info(f"报告标题: {title}")
            self.logger.info(f"时间范围: {start_date.strftime('%Y-%m-%d')} 至 {end_date.strftime('%Y-%m-%d')}")

            # 1. Basic statistics.
            self.logger.info("正在获取基本统计数据...")
            stats = self._get_basic_statistics(days)
            self.logger.info(f"基本统计数据获取完成: 总内容数={stats.get('total_count', 0)}")

            # 2. Sentiment analysis data.
            self.logger.info("正在获取情感分析数据...")
            sentiment_data = self.trend_analyzer.analyze_sentiment_trend(days)
            self.logger.info("情感分析数据获取完成")

            # 3. Hot topics.
            self.logger.info("正在获取热点话题...")
            hot_topics = self.hotspot_detector.detect_hot_topics(days)
            self.logger.info(f"热点话题获取完成: {len(hot_topics) if hot_topics else 0}个话题")

            # 4. Geographic distribution.
            self.logger.info("正在获取地域分布...")
            location_data = self.location_analyzer.analyze_location_distribution(days)
            self.logger.info("地域分布获取完成")

            # 5. Media-source distribution.
            self.logger.info("正在获取媒体来源分布...")
            source_data = self.trend_analyzer.analyze_source_distribution(days)
            self.logger.info("媒体来源分布获取完成")

            # 6. Volume trend.
            self.logger.info("正在获取声量趋势...")
            volume_data = self.trend_analyzer.analyze_volume_trend(days)
            self.logger.info("声量趋势获取完成")

            # Assemble the report payload.
            report_data = {
                'title': title,
                'generation_time': datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
                'time_range': {
                    'start': start_date.strftime('%Y-%m-%d'),
                    'end': end_date.strftime('%Y-%m-%d'),
                    'days': days
                },
                'statistics': stats,
                'sentiment_analysis': sentiment_data,
                'hot_topics': hot_topics[:10] if hot_topics else [],  # keep only the top 10 topics
                'location_distribution': location_data,
                'source_distribution': source_data,
                'volume_trend': volume_data
            }

            elapsed_time = time.time() - start_time
            self.logger.info(f"每日报告生成完成，耗时: {elapsed_time:.2f}秒")

            return report_data

        except Exception as e:
            self.logger.error(f"生成每日报告失败: {str(e)}", exc_info=True)
            return {'error': str(e)}

    def generate_weekly_report(self) -> Dict:
        """
        Generate the weekly public-opinion analysis report.

        Builds on the 7-day daily report and adds hot events, trending
        keywords and per-location sentiment.

        Returns:
            Report data dict, or ``{'error': ...}`` on failure.
        """
        start_time = time.time()
        self.logger.info("开始生成每周报告...")

        try:
            # Reuse the daily-report pipeline over a 7-day window.
            self.logger.info("正在生成基础报告数据 (days=7)...")
            report_data = self.generate_daily_report(days=7)

            if 'error' in report_data:
                return report_data

            # Replace the title with the weekly form.
            end_date = datetime.now()
            start_date = end_date - timedelta(days=7)
            report_data['title'] = f"每周舆情分析报告 - {start_date.strftime('%Y年%m月%d日')} 至 {end_date.strftime('%Y年%m月%d日')}"
            self.logger.info(f"报告标题: {report_data['title']}")

            # Weekly-report specific sections:
            # 1. Hot events of the week.
            self.logger.info("正在获取热点事件...")
            hot_events = self.hotspot_detector.get_hot_events(days=7)
            report_data['hot_events'] = hot_events[:5] if hot_events else []  # keep only the top 5 events
            self.logger.info(f"热点事件获取完成: {len(report_data['hot_events'])}个事件")

            # 2. Trending keywords of the week.
            self.logger.info("正在获取趋势关键词...")
            trending_keywords = self.hotspot_detector.get_trending_keywords(days=7)
            report_data['trending_keywords'] = trending_keywords[:15] if trending_keywords else []  # keep only the top 15 keywords
            self.logger.info(f"趋势关键词获取完成: {len(report_data['trending_keywords'])}个关键词")

            # 3. Per-location sentiment distribution.
            self.logger.info("正在获取地域情感分布...")
            location_sentiment = self.location_analyzer.analyze_location_sentiment(days=7)
            report_data['location_sentiment'] = location_sentiment
            self.logger.info("地域情感分布获取完成")

            elapsed_time = time.time() - start_time
            self.logger.info(f"每周报告生成完成，耗时: {elapsed_time:.2f}秒")

            return report_data

        except Exception as e:
            self.logger.error(f"生成每周报告失败: {str(e)}", exc_info=True)
            return {'error': str(e)}

    def generate_monthly_report(self) -> Dict:
        """
        Generate the monthly public-opinion analysis report.

        Builds on the 30-day daily report and adds event clusters,
        per-location sentiment and map data.

        Returns:
            Report data dict, or ``{'error': ...}`` on failure.
        """
        start_time = time.time()
        self.logger.info("开始生成每月报告...")

        try:
            # Reuse the daily-report pipeline over a 30-day window.
            self.logger.info("正在生成基础报告数据 (days=30)...")
            report_data = self.generate_daily_report(days=30)

            if 'error' in report_data:
                return report_data

            # Replace the title with the monthly form.
            end_date = datetime.now()
            start_date = end_date - timedelta(days=30)
            report_data['title'] = f"每月舆情分析报告 - {start_date.strftime('%Y年%m月')} 至 {end_date.strftime('%Y年%m月')}"
            self.logger.info(f"报告标题: {report_data['title']}")

            # Monthly-report specific sections:
            # 1. Clusters of hot events for the month.
            self.logger.info("正在获取热点事件聚类...")
            clusters = self.hotspot_detector.cluster_articles(days=30)
            report_data['event_clusters'] = clusters[:10] if clusters else []  # keep only the top 10 clusters
            self.logger.info(f"热点事件聚类获取完成: {len(report_data['event_clusters'])}个聚类")

            # 2. Monthly sentiment trend is already included by the daily report.

            # 3. Per-location sentiment comparison.
            self.logger.info("正在获取地域情感分布...")
            location_sentiment = self.location_analyzer.analyze_location_sentiment(days=30)
            report_data['location_sentiment'] = location_sentiment
            self.logger.info("地域情感分布获取完成")

            # 4. Map visualization data.
            self.logger.info("正在生成地图数据...")
            map_data = self.location_analyzer.generate_map_data(days=30)
            report_data['map_data'] = map_data
            self.logger.info("地图数据生成完成")

            elapsed_time = time.time() - start_time
            self.logger.info(f"每月报告生成完成，耗时: {elapsed_time:.2f}秒")

            return report_data

        except Exception as e:
            self.logger.error(f"生成每月报告失败: {str(e)}", exc_info=True)
            return {'error': str(e)}

    def generate_custom_report(self, start_date: datetime, end_date: datetime,
                              include_sections: List[str] = None) -> Dict:
        """
        Generate a public-opinion report over a custom date range.

        Args:
            start_date: First day of the range.
            end_date: Last day of the range.
            include_sections: Section names to include; None includes all.

        Returns:
            Report data dict, or ``{'error': ...}`` on failure.
        """
        start_time = time.time()
        self.logger.info(f"开始生成自定义报告: {start_date.strftime('%Y-%m-%d')} 至 {end_date.strftime('%Y-%m-%d')}...")

        if include_sections:
            self.logger.info(f"包含的部分: {', '.join(include_sections)}")
        else:
            self.logger.info("包含所有部分")

        try:
            # Number of days covered (inclusive of both endpoints).
            days = (end_date - start_date).days + 1
            self.logger.info(f"报告时间范围: {days}天")

            # Report title.
            title = f"自定义舆情分析报告 - {start_date.strftime('%Y年%m月%d日')} 至 {end_date.strftime('%Y年%m月%d日')}"
            self.logger.info(f"报告标题: {title}")

            # Initialize the report payload with common metadata.
            report_data = {
                'title': title,
                'generation_time': datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
                'time_range': {
                    'start': start_date.strftime('%Y-%m-%d'),
                    'end': end_date.strftime('%Y-%m-%d'),
                    'days': days
                }
            }

            # Populate only the requested sections (all when include_sections is None).
            if include_sections is None or 'statistics' in include_sections:
                self.logger.info("正在获取基本统计数据...")
                report_data['statistics'] = self._get_basic_statistics(days)
                self.logger.info("基本统计数据获取完成")

            if include_sections is None or 'sentiment_analysis' in include_sections:
                self.logger.info("正在获取情感分析数据...")
                report_data['sentiment_analysis'] = self.trend_analyzer.analyze_sentiment_trend(days)
                self.logger.info("情感分析数据获取完成")

            if include_sections is None or 'hot_topics' in include_sections:
                self.logger.info("正在获取热点话题...")
                hot_topics = self.hotspot_detector.detect_hot_topics(days)
                report_data['hot_topics'] = hot_topics[:10] if hot_topics else []
                self.logger.info(f"热点话题获取完成: {len(report_data['hot_topics'])}个话题")

            if include_sections is None or 'location_distribution' in include_sections:
                self.logger.info("正在获取地域分布...")
                report_data['location_distribution'] = self.location_analyzer.analyze_location_distribution(days)
                self.logger.info("地域分布获取完成")

            if include_sections is None or 'source_distribution' in include_sections:
                self.logger.info("正在获取媒体来源分布...")
                report_data['source_distribution'] = self.trend_analyzer.analyze_source_distribution(days)
                self.logger.info("媒体来源分布获取完成")

            if include_sections is None or 'volume_trend' in include_sections:
                self.logger.info("正在获取声量趋势...")
                report_data['volume_trend'] = self.trend_analyzer.analyze_volume_trend(days)
                self.logger.info("声量趋势获取完成")

            if include_sections is None or 'hot_events' in include_sections:
                self.logger.info("正在获取热点事件...")
                hot_events = self.hotspot_detector.get_hot_events(days)
                report_data['hot_events'] = hot_events[:5] if hot_events else []
                self.logger.info(f"热点事件获取完成: {len(report_data['hot_events'])}个事件")

            if include_sections is None or 'trending_keywords' in include_sections:
                self.logger.info("正在获取趋势关键词...")
                trending_keywords = self.hotspot_detector.get_trending_keywords(days)
                report_data['trending_keywords'] = trending_keywords[:15] if trending_keywords else []
                self.logger.info(f"趋势关键词获取完成: {len(report_data['trending_keywords'])}个关键词")

            if include_sections is None or 'location_sentiment' in include_sections:
                self.logger.info("正在获取地域情感分布...")
                report_data['location_sentiment'] = self.location_analyzer.analyze_location_sentiment(days)
                self.logger.info("地域情感分布获取完成")

            if include_sections is None or 'map_data' in include_sections:
                self.logger.info("正在生成地图数据...")
                report_data['map_data'] = self.location_analyzer.generate_map_data(days)
                self.logger.info("地图数据生成完成")

            elapsed_time = time.time() - start_time
            self.logger.info(f"自定义报告生成完成，耗时: {elapsed_time:.2f}秒")

            return report_data

        except Exception as e:
            self.logger.error(f"生成自定义报告失败: {str(e)}", exc_info=True)
            return {'error': str(e)}

    def export_report_to_html(self, report_data: Dict, output_path: str) -> bool:
        """
        Export a report to an HTML file via the Jinja2 template.

        Args:
            report_data: Report payload.
            output_path: Destination file path.

        Returns:
            True on success, False otherwise.
        """
        start_time = time.time()
        self.logger.info(f"开始导出HTML报告: {output_path}")

        try:
            # Locate the Jinja2 template directory relative to this file.
            template_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                       '../../templates/reports')
            env = Environment(loader=FileSystemLoader(template_dir))
            template = env.get_template('report_template.html')

            # Render the HTML.
            self.logger.info("正在渲染HTML内容...")
            html_content = template.render(report=report_data)

            # Write the HTML file.
            self.logger.info(f"正在保存HTML文件: {output_path}")
            with open(output_path, 'w', encoding='utf-8') as f:
                f.write(html_content)

            elapsed_time = time.time() - start_time
            self.logger.info(f"HTML报告导出成功，文件大小: {os.path.getsize(output_path)/1024:.2f}KB，耗时: {elapsed_time:.2f}秒")
            return True

        except Exception as e:
            self.logger.error(f"导出HTML报告失败: {str(e)}", exc_info=True)
            return False

    def export_report_to_pdf(self, report_data: Dict, output_path: str) -> bool:
        """
        Export a report to a PDF file (HTML intermediate + pdfkit).

        Args:
            report_data: Report payload.
            output_path: Destination file path.

        Returns:
            True on success, False otherwise.
        """
        start_time = time.time()
        self.logger.info(f"开始导出PDF报告: {output_path}")

        try:
            # Build the temp HTML path from the extension, not str.replace:
            # replace('.pdf', '.html') would corrupt paths containing '.pdf'
            # elsewhere and silently do nothing for non-.pdf output paths.
            base, _ext = os.path.splitext(output_path)
            html_path = base + '.html'
            self.logger.info(f"正在生成临时HTML文件: {html_path}")

            if not self.export_report_to_html(report_data, html_path):
                self.logger.error("生成临时HTML文件失败")
                return False

            # Convert HTML to PDF via pdfkit (wkhtmltopdf).
            options = {
                'page-size': 'A4',
                'encoding': 'UTF-8',
                'margin-top': '1cm',
                'margin-right': '1cm',
                'margin-bottom': '1cm',
                'margin-left': '1cm',
                # .get avoids a KeyError for payloads without a title.
                'title': report_data.get('title', '')
            }

            self.logger.info("正在将HTML转换为PDF...")
            pdfkit.from_file(html_path, output_path, options=options)

            # Remove the temporary HTML file.
            if os.path.exists(html_path):
                self.logger.info(f"正在删除临时HTML文件: {html_path}")
                os.remove(html_path)

            elapsed_time = time.time() - start_time
            self.logger.info(f"PDF报告导出成功，文件大小: {os.path.getsize(output_path)/1024:.2f}KB，耗时: {elapsed_time:.2f}秒")
            return True

        except Exception as e:
            self.logger.error(f"导出PDF报告失败: {str(e)}", exc_info=True)
            return False

    def export_report_to_json(self, report_data: Dict, output_path: str) -> bool:
        """
        Export a report to a JSON file.

        Args:
            report_data: Report payload.
            output_path: Destination file path.

        Returns:
            True on success, False otherwise.
        """
        start_time = time.time()
        self.logger.info(f"开始导出JSON报告: {output_path}")

        try:
            # Convert non-serializable values (datetimes, embedded images).
            self.logger.info("正在处理JSON序列化...")
            processed_data = self._process_for_json(report_data)

            # Write the JSON file.
            self.logger.info(f"正在保存JSON文件: {output_path}")
            with open(output_path, 'w', encoding='utf-8') as f:
                json.dump(processed_data, f, ensure_ascii=False, indent=4)

            elapsed_time = time.time() - start_time
            self.logger.info(f"JSON报告导出成功，文件大小: {os.path.getsize(output_path)/1024:.2f}KB，耗时: {elapsed_time:.2f}秒")
            return True

        except Exception as e:
            self.logger.error(f"导出JSON报告失败: {str(e)}", exc_info=True)
            return False

    def _count_sentiment(self, model, start_date, label: str) -> int:
        """Count rows of *model* collected since *start_date* labelled *label*."""
        return model.query.filter(
            model.collected_at >= start_date,
            model.sentiment == label
        ).count()

    def _get_basic_statistics(self, days: int) -> Dict:
        """
        Collect basic statistics over the last *days* days.

        Args:
            days: Time range in days.

        Returns:
            Statistics dict (empty on failure): totals, sentiment
            distribution, top keywords, source and platform distributions.
        """
        try:
            # Start of the time window.
            start_date = datetime.now() - timedelta(days=days)

            # Total article / social-media-post counts.
            total_articles = Article.query.filter(Article.collected_at >= start_date).count()
            total_posts = SocialMediaPost.query.filter(SocialMediaPost.collected_at >= start_date).count()

            self.logger.debug(f"文章数: {total_articles}, 社交媒体帖子数: {total_posts}")

            # Sentiment distribution (labels stored as Chinese strings in the DB).
            positive_articles = self._count_sentiment(Article, start_date, '正面')
            negative_articles = self._count_sentiment(Article, start_date, '负面')
            neutral_articles = self._count_sentiment(Article, start_date, '中性')

            positive_posts = self._count_sentiment(SocialMediaPost, start_date, '正面')
            negative_posts = self._count_sentiment(SocialMediaPost, start_date, '负面')
            neutral_posts = self._count_sentiment(SocialMediaPost, start_date, '中性')

            self.logger.debug(f"情感分布 - 正面: {positive_articles + positive_posts}, 负面: {negative_articles + negative_posts}, 中性: {neutral_articles + neutral_posts}")

            # Top keywords by frequency.
            keywords = Keyword.query.order_by(Keyword.frequency.desc()).limit(10).all()
            top_keywords = [keyword.word for keyword in keywords]

            self.logger.debug(f"热门关键词: {', '.join(top_keywords) if top_keywords else '无'}")

            # Source distribution across articles.
            articles = Article.query.filter(Article.collected_at >= start_date).all()
            source_counter = {}
            for article in articles:
                source_counter[article.source] = source_counter.get(article.source, 0) + 1

            # Platform distribution across social-media posts.
            posts = SocialMediaPost.query.filter(SocialMediaPost.collected_at >= start_date).all()
            platform_counter = {}
            for post in posts:
                platform_counter[post.platform] = platform_counter.get(post.platform, 0) + 1

            # Assemble the statistics payload.
            stats = {
                'total_count': total_articles + total_posts,
                'article_count': total_articles,
                'social_post_count': total_posts,
                'sentiment_distribution': {
                    'positive': positive_articles + positive_posts,
                    'negative': negative_articles + negative_posts,
                    'neutral': neutral_articles + neutral_posts
                },
                'top_keywords': top_keywords,
                'source_distribution': source_counter,
                'platform_distribution': platform_counter
            }

            return stats

        except Exception as e:
            self.logger.error(f"获取基本统计数据失败: {str(e)}", exc_info=True)
            return {}

    def _process_for_json(self, data):
        """
        Recursively convert *data* into a JSON-serializable structure.

        Drops 'chart_image' entries (large base64 blobs), converts datetime
        values to ISO-8601 strings (including inside lists — the previous
        version missed list elements, which made json.dump raise TypeError),
        and leaves everything else untouched.

        Args:
            data: A dict, list, or scalar value.

        Returns:
            The converted structure.
        """
        if isinstance(data, dict):
            result = {}
            for key, value in data.items():
                if key == 'chart_image':
                    # Skip embedded chart image data.
                    self.logger.debug(f"跳过图表图像数据: {key}")
                    continue
                result[key] = self._json_value(value)
            return result
        if isinstance(data, list):
            return [self._json_value(item) for item in data]
        return data

    def _json_value(self, value):
        """Convert one value to a JSON-serializable form."""
        if isinstance(value, (dict, list)):
            return self._process_for_json(value)
        if isinstance(value, datetime):
            return value.isoformat()
        return value