import json
import requests
import os
from beeize.scraper import Scraper

# Shared beeize scraper instance; items are pushed to it in extract_info().
scraper = Scraper()



# Same proxy endpoint for both schemes; PROXY_URL may be unset (None),
# in which case requests connects directly.
proxies = {
            'http': os.getenv('PROXY_URL'),
            'https': os.getenv('PROXY_URL'),
        }  # Read the proxy configuration from the environment

def fetch_data(page_num, page_size, club_bbs_id):
    """Fetch one page of forum topics from the Autohome club API.

    Args:
        page_num: 1-based page index to request.
        page_size: number of topics per page.
        club_bbs_id: numeric id of the target forum (club).

    Returns:
        The decoded JSON payload as a dict, or None on any network,
        HTTP-status, or JSON-decoding failure.
    """
    url = (
        "https://club.autohome.com.cn/frontapi/data/page/club_get_topics_list"
        f"?page_num={page_num}&page_size={page_size}"
        f"&club_bbs_type=c&club_bbs_id={club_bbs_id}&club_order_type=1"
    )
    try:
        # A timeout is essential: without one a stalled proxy or server
        # would hang the whole scrape forever.
        response = requests.get(url, proxies=proxies, timeout=30)
    except requests.RequestException as exc:
        # Connection/proxy/timeout errors previously crashed the run;
        # report and fall back to the function's None-on-failure contract.
        print(f"Request failed: {exc}")
        return None
    if response.status_code == 200:
        try:
            return response.json()
        except json.JSONDecodeError:
            print("Error decoding JSON response")
            return None
    print(f"Failed to retrieve data: {response.status_code}")
    return None

def extract_info(data):
    """Extract the interesting fields from an API response payload.

    Each extracted item is printed, pushed to the shared ``scraper``
    dataset, and collected into the returned list.

    Args:
        data: decoded JSON dict as returned by fetch_data().

    Returns:
        A list of dicts, one per topic, each holding a fixed set of
        fields (missing fields default to "").
    """
    # Fields copied verbatim from each raw topic item.
    fields = (
        "club_bbs_name",
        "publish_time",
        "ip_province_name",
        "club_jinghua_summary",
        "summary",
        "title",
        "ip_city_name",
        "club_bbs_id",
    )
    # `or {}` / `or []` guard against the API returning "result": null or
    # "items": null — dict.get's default only applies when the key is
    # absent, so the original chained .get(...) could raise AttributeError.
    items = (data.get('result') or {}).get('items') or []
    extracted_data = []
    for item in items:
        data_item = {field: item.get(field, "") for field in fields}
        print(data_item)
        extracted_data.append(data_item)
        scraper.push_data(data_item)
    return extracted_data

def main():
    """Scrape topic pages from the Autohome forum configured via env vars.

    Environment variables (all required, all integers):
        PAGENUM:   total number of pages to scrape.
        PAGESIZE:  topics per page.
        CLUBBBSID: numeric forum id.
        PAGESTART: page offset; scraping starts at PAGESTART + 1.

    Stops early at the first failed or empty page, then prints every
    collected item.
    """
    try:
        total_pages = int(os.getenv('PAGENUM'))  # Total number of pages to scrape
        page_size = int(os.getenv('PAGESIZE'))
        club_bbs_id = int(os.getenv('CLUBBBSID'))
        pagestart = int(os.getenv('PAGESTART'))
    except (TypeError, ValueError):
        # int(None) raises TypeError when a variable is unset; int("abc")
        # raises ValueError when it is not numeric.  The original caught
        # only ValueError, so missing variables crashed with a traceback.
        print("Error: Environment variables should be integers.")
        return

    all_data = []

    # Pages PAGESTART+1 .. PAGESTART+PAGENUM.  Folding the offset into the
    # range bounds avoids reassigning the loop variable each iteration.
    for current_page in range(pagestart + 1, pagestart + total_pages + 1):
        data = fetch_data(current_page, page_size, club_bbs_id)
        if not data:
            break
        extracted_data = extract_info(data)
        if not extracted_data:
            break
        all_data.extend(extracted_data)

    for item in all_data:
        print(item)

# Run the scraper only when executed as a script, not on import.
if __name__ == "__main__":
    main()
