import requests
import pandas as pd
from bs4 import BeautifulSoup
from tkinter import messagebox

class WebScraper:
    """Scrape Xiaohongshu post pages listed in an Excel file.

    Reads URLs from the first column of ``input_file``, fetches each page,
    extracts the author profile link, username, and a city prefix from the
    title, then writes all rows to ``output_file`` while driving a tkinter
    progress bar.
    """

    @staticmethod
    def web_scraping(input_file, output_file, progress_bar):
        """Fetch every URL from the input spreadsheet and save scraped fields.

        Args:
            input_file: Path to an Excel file whose first column holds post URLs.
            output_file: Path where the results Excel file is written.
            progress_bar: A tkinter ``ttk.Progressbar``; its ``maximum`` is set
                to the URL count and its ``value`` advanced once per URL.

        Network failures for a single URL produce a row of ``None`` fields
        (except the URL itself) instead of aborting the whole run.
        """
        df_urls = pd.read_excel(input_file)
        urls = df_urls.iloc[:, 0].tolist()
        results = []

        # Browser-like UA: some sites serve different (or no) markup to
        # unidentified clients.
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
        }

        total_urls = len(urls)
        progress_bar["maximum"] = total_urls

        for idx, url in enumerate(urls):
            try:
                # timeout added: without it a single unresponsive server
                # would hang the entire scrape indefinitely.
                response = requests.get(url, headers=headers, timeout=15)
                response.raise_for_status()
                soup = BeautifulSoup(response.content, 'html.parser')

                # BUG FIX: clean_author_href was previously assigned only
                # inside the nested ifs below, so pages without an author
                # link raised NameError at the results.append() — which the
                # RequestException handler does not catch, crashing the run.
                clean_author_href = None
                author_wrapper = soup.find(class_='author-wrapper')
                if author_wrapper:
                    first_a_tag = author_wrapper.find('a')
                    if first_a_tag and 'href' in first_a_tag.attrs:
                        author_href = first_a_tag['href']
                        full_author_href = 'https://www.xiaohongshu.com' + author_href
                        # Drop query string (tracking params) from the profile URL.
                        clean_author_href = full_author_href.split('?')[0]

                username_text = None
                username = soup.find(class_='username')
                if username:
                    username_text = username.get_text(strip=True)

                # First two characters of the detail title are treated as the
                # city name; None when the title is missing or too short.
                city = None
                detail_title = soup.find(id='detail-title')
                if detail_title:
                    city_text = detail_title.get_text(strip=True)
                    city = city_text[:2] if len(city_text) >= 2 else None

                results.append({
                    '达人昵称': username_text,
                    '主页链接': clean_author_href,
                    '城市': city,
                    '发布链接': url
                })
            except requests.RequestException:
                # Best-effort: record the URL with empty fields and move on.
                results.append({
                    '达人昵称': None,
                    '主页链接': None,
                    '城市': None,
                    '发布链接': url
                })
            # Advance the progress bar whether the fetch succeeded or not.
            WebScraper.update_progress(progress_bar, idx + 1)

        df_results = pd.DataFrame(results)
        df_results.to_excel(output_file, index=False)

        WebScraper.show_info("完成", f"抓取完成！数据已保存到: {output_file}")

    @staticmethod
    def update_progress(progress_bar, value):
        """Set the progress bar's value on the tkinter main thread via after()."""
        progress_bar.after(0, progress_bar.config, {'value': value})

    @staticmethod
    def show_info(title, message):
        """Show a blocking tkinter info dialog with the given title and message."""
        messagebox.showinfo(title, message)
