import facebook
from datetime import datetime, timedelta
from .base import BaseCrawler

class FacebookCrawler(BaseCrawler):
    """Crawler implementation for Facebook public posts.

    For each configured keyword, searches the Graph API for matching
    posts, re-fetches each post's full details, and persists one
    normalized record per post via ``save_data``.
    """

    def __init__(self, task):
        """Initialize the crawler from a task object.

        Args:
            task: Task carrying ``parameters`` (``keywords``,
                ``start_date``, ``end_date``, ``max_posts``) and
                ``data_source.credentials`` (``access_token``).
        """
        super().__init__(task)
        # Cooperative cancellation flag, checked between API calls.
        self.is_running = False

        # Crawl configuration from the task parameters.
        self.keywords = self.task.parameters.get('keywords', [])
        # NOTE(review): start_date/end_date are stored but never applied as
        # a filter anywhere in this class — confirm whether date filtering
        # is expected to happen here or downstream.
        self.start_date = self.task.parameters.get('start_date')
        self.end_date = self.task.parameters.get('end_date')
        self.max_posts = self.task.parameters.get('max_posts', 100)

        # Authentication material for the Graph API client.
        self.credentials = self.task.data_source.credentials
        self.graph = self._setup_api()

    def _setup_api(self):
        """Build the Facebook Graph API client.

        Returns:
            A ``facebook.GraphAPI`` instance, or ``None`` when the client
            could not be constructed (failure is reported via
            ``update_progress``).
        """
        try:
            access_token = self.credentials.get('access_token')
            return facebook.GraphAPI(access_token=access_token, version="3.1")
        except Exception as e:
            self.update_progress(0, f"Facebook API设置失败: {str(e)}")
            return None

    def validate_credentials(self):
        """Return True if the stored credentials can access the API."""
        # The null-check cannot raise, so it lives outside the try block.
        if not self.graph:
            return False
        try:
            # Fetching the token owner's own profile is a cheap probe
            # that any valid access token can perform.
            self.graph.get_object('me')
            return True
        except Exception:
            return False

    def search_posts(self, keyword):
        """Search public Facebook posts matching *keyword*.

        Args:
            keyword: Search term passed to the Graph API ``q`` parameter.

        Returns:
            A list of post dicts (possibly empty); on any API error the
            error is reported via ``update_progress`` and ``[]`` is
            returned so the crawl can continue with the next keyword.
        """
        try:
            search_results = self.graph.search(
                type='post',
                q=keyword,
                limit=self.max_posts,
                fields='id,message,created_time,shares,reactions.summary(true),'
                      'comments.summary(true),from'
            )

            return search_results.get('data', [])
        except Exception as e:
            self.update_progress(
                self.task.progress,
                f"搜索Facebook帖子时出错: {str(e)}"
            )
            return []

    def get_post_details(self, post_id):
        """Fetch the full post object for *post_id*, or None on error."""
        try:
            return self.graph.get_object(
                post_id,
                fields='id,message,created_time,shares,reactions.summary(true),'
                      'comments.summary(true),from'
            )
        except Exception:
            # Best-effort: a single unreadable post should not abort the crawl.
            return None

    def parse_post(self, post):
        """Normalize a raw Graph API post dict into the storage schema.

        Missing counters default to 0 and missing author fields to None,
        so partially-populated API responses parse without raising.
        """
        return {
            'id': post.get('id'),
            'message': post.get('message'),
            'created_time': post.get('created_time'),
            'shares': post.get('shares', {}).get('count', 0),
            'reactions': {
                'total': post.get('reactions', {}).get('summary', {}).get('total_count', 0)
            },
            'comments': {
                'total': post.get('comments', {}).get('summary', {}).get('total_count', 0)
            },
            'from': {
                'id': post.get('from', {}).get('id'),
                'name': post.get('from', {}).get('name')
            }
        }

    def start(self):
        """Run the crawl: search each keyword, fetch and save post details.

        Progress is reported as a percentage of ``keywords × max_posts``
        and capped at 99 until the crawl actually finishes.
        """
        if not self.validate_credentials():
            # Report the failure reason BEFORE marking the task finished,
            # matching the ordering used on the error path below, so the
            # final status message is not written after completion.
            self.update_progress(0, "Facebook API验证失败")
            self.complete_task(success=False)
            return

        self.is_running = True
        total_processed = 0
        # Guard against a zero denominator (no keywords, or max_posts == 0),
        # which would otherwise raise ZeroDivisionError in the progress math.
        expected_total = max(1, len(self.keywords) * self.max_posts)

        try:
            for keyword in self.keywords:
                if not self.is_running:
                    break

                posts = self.search_posts(keyword)
                for post in posts:
                    if not self.is_running:
                        break

                    # Search results are partial objects; re-fetch the
                    # complete post before parsing.
                    post_details = self.get_post_details(post['id'])
                    if post_details:
                        parsed_post = self.parse_post(post_details)
                        self.save_data(
                            data=parsed_post,
                            metadata={
                                'keyword': keyword,
                                'crawled_at': datetime.now().isoformat()
                            }
                        )

                        total_processed += 1
                        progress = min(
                            int((total_processed / expected_total) * 100),
                            99
                        )
                        self.update_progress(progress)

            if self.is_running:
                self.complete_task(success=True)
                self.update_progress(100)
        except Exception as e:
            self.update_progress(
                self.task.progress,
                f"爬取过程中出错: {str(e)}"
            )
            self.complete_task(success=False)
        finally:
            # Always drop the running flag, even on error, so the crawler
            # can be restarted cleanly.
            self.is_running = False

    def stop(self):
        """Request a graceful stop; the crawl loop exits at its next check."""
        self.is_running = False