#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import asyncio
import sys
import os

# Make sibling packages (crawlers/, utils/) importable when this file is run
# directly as a script rather than as part of an installed package.
sys.path.append(os.path.dirname(os.path.abspath(__file__)))

from crawlers.douyin.web.web_crawler import DouyinWebCrawler
from utils.export_manager import export_manager

async def simple_export():
    """Search Douyin for a fixed keyword and export the results.

    Fetches up to ``count`` videos matching ``keyword`` via
    ``DouyinWebCrawler.fetch_videos_complete_data`` and, if any were found,
    writes them out as JSON and CSV through ``export_manager``.

    Returns:
        None. Progress and errors are reported on stdout.
    """
    print("Starting export...")

    try:
        crawler = DouyinWebCrawler()
        keyword = "美食"
        count = 3

        print(f"Searching for: {keyword}")
        search_data = await crawler.fetch_videos_complete_data(keyword, count)

        # Look up the video list once; an empty/missing list is falsy, so no
        # explicit len(...) > 0 check is needed.
        videos = search_data.get('videos', []) if search_data else []
        if videos:
            print(f"Found {len(videos)} videos")

            # Export files
            json_path = export_manager.export_to_json(search_data, keyword)
            csv_path = export_manager.export_to_csv(search_data, keyword)

            print(f"JSON exported: {json_path}")
            print(f"CSV exported: {csv_path}")
            print("Export completed successfully!")
        else:
            print("No videos found")

    except Exception as e:
        # Top-level script boundary: catch everything so the script reports a
        # readable message instead of a traceback to the end user.
        print(f"Export failed: {e}")

if __name__ == "__main__":
    asyncio.run(simple_export())