#!/usr/bin/env python3
"""
UN General Assembly Resolution Crawler
Crawls UN resolutions, filters lopsided votes, and stores in SQLite database.
"""

import sqlite3
import requests
from bs4 import BeautifulSoup
import re
import time
import csv
import subprocess
from urllib.parse import urljoin
import json
from datetime import datetime

class UNResolutionCrawler:
    """Crawler for UN General Assembly voting records.

    Fetches resolution listings and per-resolution voting data from the
    UN Digital Library, flags "lopsided" votes (the minority side smaller
    than ``voting_threshold``), stores everything in a SQLite database,
    and can export the collected table to CSV.
    """

    def __init__(self, db_path="un_resolutions.db"):
        """Initialize the HTTP session and ensure the database schema exists.

        Args:
            db_path: Path of the SQLite database file to create or reuse.
        """
        self.db_path = db_path
        self.session = requests.Session()
        # Browser-like User-Agent; some endpoints reject generic clients.
        self.session.headers.update({
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
        })
        # Bug fix: was the malformed "https.org".
        self.base_url = "https://digitallibrary.un.org"
        # A vote counts as "lopsided" when the losing side has fewer than
        # this many votes.
        self.voting_threshold = 10
        self.setup_database()

    def setup_database(self):
        """Create the SQLite database and the ``resolutions`` table.

        Also migrates older database files that predate the
        ``abstain_countries`` column.
        """
        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()

            cursor.execute('''
                CREATE TABLE IF NOT EXISTS resolutions (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    resolution_number TEXT UNIQUE,
                    title TEXT,
                    date TEXT,
                    topic_category TEXT,
                    votes_for INTEGER,
                    votes_against INTEGER,
                    votes_abstain INTEGER,
                    is_lopsided BOOLEAN,
                    minority_countries TEXT,
                    minority_count INTEGER,
                    majority_side TEXT,
                    url TEXT,
                    abstain_countries TEXT,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                )
            ''')

            # Legacy databases may lack abstain_countries; add it in place.
            cursor.execute("PRAGMA table_info(resolutions)")
            columns = {column[1] for column in cursor.fetchall()}
            if 'abstain_countries' not in columns:
                cursor.execute("ALTER TABLE resolutions ADD COLUMN abstain_countries TEXT")

            conn.commit()
        finally:
            # Close even if table creation/migration raises.
            conn.close()

    def get_resolution_list(self, session=79):
        """Return resolution links for a given General Assembly session.

        Args:
            session: GA session number used as the search term.

        Returns:
            List of dicts with ``url``, ``title`` and ``number`` keys;
            an empty list on any fetch or parse error.
        """
        url = "https://digitallibrary.un.org/search?ln=en&cc=Voting+Data&p={}".format(session)

        try:
            # Consistency fix: use the shared requests session (with its
            # User-Agent header) instead of shelling out to curl.
            response = self.session.get(url, timeout=30)
            response.raise_for_status()
            soup = BeautifulSoup(response.content, 'html.parser')

            resolutions = []
            # Record pages all live under /record/<id>.
            for link in soup.find_all('a', href=re.compile(r'/record/')):
                href = link.get('href')
                title = link.get_text(strip=True)
                if href and title:
                    resolutions.append({
                        'url': urljoin(self.base_url, href),
                        'title': title,
                        'number': self.extract_resolution_number(href)
                    })
            return resolutions

        except Exception as e:
            print("Error fetching resolution list: {}".format(e))
            return []

    def extract_resolution_number(self, url):
        """Extract the numeric record id from a /record/<id> URL.

        Returns:
            The id as a string, or "Unknown" if the URL does not match.
        """
        match = re.search(r'/record/(\d+)', url)
        return match.group(1) if match else "Unknown"

    def get_voting_data(self, resolution_url):
        """Fetch a resolution page and extract its voting breakdown.

        Tries table-based parsing first, then falls back to free-text
        parsing.

        Returns:
            Votes dict, or None on error / when no vote data was found.
        """
        try:
            response = self.session.get(resolution_url, timeout=30)
            response.raise_for_status()

            soup = BeautifulSoup(response.content, 'html.parser')

            voting_data = self.parse_voting_table(soup)
            if not voting_data:
                voting_data = self.parse_voting_text(soup)
            return voting_data

        except Exception as e:
            print("Error fetching voting data from {}: {}".format(resolution_url, e))
            return None

    def parse_voting_table(self, soup):
        """Parse voting data out of HTML tables.

        Scans each table for rows whose first cell labels a vote side
        ("yes"/"no"/"abstain" variants); the second cell holds the tally
        and an optional third cell the country roster.

        Returns:
            Votes dict ({'for', 'against', 'abstain', 'countries'}) for
            the first table with any for/against votes, else None.
        """
        for table in soup.find_all('table'):
            votes = {'for': 0, 'against': 0, 'abstain': 0,
                     'countries': {'for': [], 'against': [], 'abstain': []}}

            for row in table.find_all('tr'):
                cells = row.find_all(['td', 'th'])
                if len(cells) < 2:
                    continue

                # Map the row label onto a vote key; branch order is kept
                # from the original implementation.
                label = cells[0].get_text(strip=True).lower()
                if 'yes' in label or 'for' in label or 'favour' in label:
                    side = 'for'
                elif 'no' in label or 'against' in label or 'oppose' in label:
                    side = 'against'
                elif 'abstain' in label or 'abstention' in label:
                    side = 'abstain'
                else:
                    continue

                # Second cell: the numeric tally.
                match = re.search(r'\d+', cells[1].get_text())
                if match:
                    votes[side] = int(match.group())
                # Optional third cell: the country roster.
                if len(cells) > 2:
                    votes['countries'][side] = self.parse_country_list(cells[2].get_text())

            if votes['for'] > 0 or votes['against'] > 0:
                return votes

        return None

    def parse_voting_text(self, soup):
        """Parse vote tallies from the page's plain text.

        Fallback for pages without a voting table. Country lists cannot
        be recovered from free text, so they stay empty.

        Returns:
            Votes dict, or None when no for/against tally was found.
        """
        text = soup.get_text()

        # Bug fix: each key is now paired with its pattern explicitly
        # instead of being re-derived by substring-matching the pattern
        # source string, which was fragile and hard to extend.
        vote_patterns = [
            ('for', r'(\d+)\s*(?:in favour|for|yes|yea)'),
            ('against', r'(\d+)\s*(?:against|no|nay)'),
            ('abstain', r'(\d+)\s*(?:abstaining|abstention|abstain)'),
        ]

        votes = {'for': 0, 'against': 0, 'abstain': 0,
                 'countries': {'for': [], 'against': [], 'abstain': []}}

        for side, pattern in vote_patterns:
            matches = re.findall(pattern, text, re.IGNORECASE)
            if matches:
                # Use the first occurrence in the document.
                votes[side] = int(matches[0])

        return votes if votes['for'] > 0 or votes['against'] > 0 else None

    def parse_country_list(self, country_text):
        """Split a blob of text into cleaned country names.

        Splits on the first separator found (comma, semicolon, newline),
        strips common prefixes/suffixes, and drops entries of <= 2 chars.

        Returns:
            List of cleaned country-name strings.
        """
        countries = []
        for separator in [',', ';', '\n']:
            if separator in country_text:
                countries = [c.strip() for c in country_text.split(separator) if c.strip()]
                break
        else:
            # Edge-case fix: a single country with no separator used to be
            # silently dropped; treat the whole text as one entry.
            if country_text.strip():
                countries = [country_text.strip()]

        cleaned_countries = []
        for country in countries:
            # Normalize names like "The Netherlands" / "Republic of Korea".
            country = re.sub(r'^(The\s+|Republic\s+of\s+)', '', country, flags=re.IGNORECASE)
            country = re.sub(r'(\s+Republic|\s+State)$', '', country, flags=re.IGNORECASE)
            country = country.strip()

            # Discard fragments too short to be a real country name.
            if country and len(country) > 2:
                cleaned_countries.append(country)

        return cleaned_countries

    def classify_topic(self, title, content=""):
        """Classify a resolution into a broad topic category by keywords.

        Returns:
            The first category (in declaration order) whose keywords
            appear in the combined title + content, or 'Other'.
        """
        text = (title + " " + content).lower()

        # Consistency fix: every category now maps to a keyword list
        # ('Political' used to be a bare string needing a special case).
        topic_keywords = {
            'Peace & Security': ['peace', 'security', 'conflict', 'war', 'military', 'terrorism', 'disarmament'],
            'Human Rights': ['human rights', 'refugee', 'asylum', 'discrimination', 'torture', 'gender'],
            'Development': ['development', 'poverty', 'education', 'health', 'sustainable', 'millennium'],
            'Environment': ['climate', 'environment', 'biodiversity', 'pollution', 'sustainability'],
            'International Law': ['court', 'treaty', 'convention', 'law', 'legal', 'jurisdiction'],
            'Humanitarian': ['humanitarian', 'aid', 'relief', 'disaster', 'emergency'],
            'Political': ['political']
        }

        for category, keywords in topic_keywords.items():
            if any(keyword in text for keyword in keywords):
                return category

        return 'Other'

    def is_lopsided_vote(self, votes):
        """Return True when the smaller of the for/against tallies is
        below the configured threshold (abstentions are ignored)."""
        minority_count = min(votes['for'], votes['against'])
        return minority_count < self.voting_threshold

    def get_minority_countries(self, votes):
        """Return (countries on the minority side, name of the majority side).

        Bug fix: the second element used to be the *minority* side's
        label, so the ``majority_side`` DB column was mislabeled. On a
        tie, 'against' is treated as the minority (original behavior).
        """
        if votes['for'] < votes['against']:
            return votes['countries']['for'], 'against'
        else:
            return votes['countries']['against'], 'for'

    def save_resolution(self, resolution_data):
        """Insert or replace one resolution row in the database.

        Args:
            resolution_data: Dict with the keys used below; country
                lists must already be JSON-encoded strings.
        """
        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()
            cursor.execute('''
                INSERT OR REPLACE INTO resolutions (
                    resolution_number, title, date, topic_category, votes_for, votes_against,
                    votes_abstain, is_lopsided, minority_countries, minority_count,
                    majority_side, url, abstain_countries
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            ''', (
                resolution_data['number'],
                resolution_data['title'],
                resolution_data.get('date', ''),
                resolution_data['topic_category'],
                resolution_data['votes_for'],
                resolution_data['votes_against'],
                resolution_data['votes_abstain'],
                resolution_data['is_lopsided'],
                resolution_data['minority_countries'],
                resolution_data['minority_count'],
                resolution_data['majority_side'],
                resolution_data['url'],
                resolution_data['abstain_countries']
            ))
            conn.commit()
            print("Saved resolution {}".format(resolution_data['number']))
        except Exception as e:
            print("Error saving resolution {}: {}".format(resolution_data['number'], e))
            conn.rollback()
        finally:
            # Robustness fix: close the connection on every path.
            conn.close()

    def crawl_resolutions(self, start_year=2020, end_year=2024):
        """Crawl and store resolutions for each year's GA session.

        Args:
            start_year: First calendar year to crawl (inclusive).
            end_year: Last calendar year to crawl (inclusive).
        """
        for year in range(start_year, end_year + 1):
            # GA session 1 convened in 1946, hence year - 1945.
            session = year - 1945
            print("Crawling resolutions for session {} ({})...".format(session, year))

            resolutions = self.get_resolution_list(session)

            for i, resolution in enumerate(resolutions):
                print("Processing {}/{}: {}".format(i + 1, len(resolutions), resolution['title']))

                voting_data = self.get_voting_data(resolution['url'])

                if voting_data:
                    minority_countries, majority_side = self.get_minority_countries(voting_data)

                    self.save_resolution({
                        'number': resolution['number'],
                        'title': resolution['title'],
                        'url': resolution['url'],
                        'topic_category': self.classify_topic(resolution['title']),
                        'votes_for': voting_data['for'],
                        'votes_against': voting_data['against'],
                        'votes_abstain': voting_data['abstain'],
                        'is_lopsided': self.is_lopsided_vote(voting_data),
                        'minority_countries': json.dumps(minority_countries),
                        'minority_count': len(minority_countries),
                        'majority_side': majority_side,
                        'date': '',  # Publication date is not parsed yet.
                        'abstain_countries': json.dumps(voting_data['countries']['abstain'])
                    })

                # Rate limiting: be polite to the server between fetches.
                time.sleep(1)

    def export_to_csv(self, filename="un_resolutions.csv"):
        """Export every stored resolution to a CSV file.

        JSON-encoded country lists are flattened to '; '-joined strings,
        and a derived abstain-country count column is appended.
        """
        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()
            cursor.execute('''
                SELECT resolution_number, title, date, topic_category, votes_for, votes_against,
                       votes_abstain, is_lopsided, minority_countries, minority_count,
                       majority_side, url, abstain_countries
                FROM resolutions
                ORDER BY date DESC
            ''')

            with open(filename, 'w', newline='', encoding='utf-8') as csvfile:
                writer = csv.writer(csvfile)
                writer.writerow(['Resolution Number', 'Title', 'Date', 'Topic Category',
                               'Votes For', 'Votes Against', 'Votes Abstain', 'Is Lopsided',
                               'Minority Countries', 'Minority Count', 'Majority Side', 'URL',
                               'Abstain Countries', 'Abstain Countries Count'])

                for row in cursor.fetchall():
                    # Country lists are stored as JSON arrays; decode them.
                    minority_countries = json.loads(row[8]) if row[8] and isinstance(row[8], str) else []
                    abstain_countries = json.loads(row[12]) if row[12] and isinstance(row[12], str) else []

                    writer.writerow([
                        row[0], row[1], row[2], row[3], row[4], row[5],
                        row[6], row[7], '; '.join(minority_countries), row[9], row[10], row[11],
                        '; '.join(abstain_countries), len(abstain_countries)
                    ])
        finally:
            conn.close()

        # Bug fix: the old message claimed only lopsided resolutions were
        # exported, but the query has no such filter.
        print("Exported resolutions to {}".format(filename))

def main():
    """Run the crawler end to end: crawl every GA session, then export CSV."""
    crawler = UNResolutionCrawler()

    print("Starting UN General Assembly Resolution Crawler...")
    print("Voting threshold: {} votes".format(crawler.voting_threshold))

    # Crawl every session from the first General Assembly (1946) onward.
    crawler.crawl_resolutions(start_year=1946, end_year=2024)

    # Dump the collected table for downstream analysis.
    crawler.export_to_csv()

    print("Crawling completed!")


if __name__ == "__main__":
    main()