#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
简单连接测试
"""

import asyncio
import sys
import os
import httpx

# Add the project root to sys.path so the local `crawlers` package resolves
sys.path.append(os.path.dirname(os.path.abspath(__file__)))

from crawlers.douyin.web.web_crawler import DouyinWebCrawler


async def simple_connectivity_test():
    """Run a quick connectivity and configuration smoke test.

    Steps:
      1. Verify basic outbound HTTP connectivity by fetching a well-known site.
      2. Instantiate ``DouyinWebCrawler`` and sanity-check its header and
         cookie configuration (read from ``crawler.headers``).

    Returns:
        bool: ``True`` when both the network check and the crawler
        configuration check complete, ``False`` otherwise. (The original
        always returned ``None``; callers ignoring the result are unaffected.)
    """
    print("开始连接测试...")

    try:
        # Bound the request so a dead network fails fast instead of hanging
        # the test forever (the original client had no timeout at all).
        async with httpx.AsyncClient(timeout=10.0) as client:
            response = await client.get("https://www.baidu.com")
            if response.status_code == 200:
                print("网络连接正常")
            else:
                print(f"网络连接异常: {response.status_code}")
                return False
    except Exception as e:
        print(f"网络连接测试失败: {str(e)}")
        return False

    # Initialize the crawler and inspect its request configuration.
    try:
        crawler = DouyinWebCrawler()
        print("爬虫初始化成功")

        # Print the key request headers so misconfiguration is visible at a glance.
        print("测试配置信息...")
        print(f"Host: {crawler.headers.get('Host', '未配置')}")
        print(f"User-Agent: {crawler.headers.get('User-Agent', '未配置')[:50]}...")

        # Heuristic: a usable cookie string is long; a short or empty value
        # usually means the config was never filled in.
        cookie = crawler.headers.get('Cookie', '')
        if cookie and len(cookie) > 100:
            print("Cookie配置正常")
        else:
            print("Cookie配置可能有问题")

        print("基本配置检查完成")
        return True

    except Exception as e:
        print(f"爬虫初始化失败: {str(e)}")
        import traceback
        traceback.print_exc()
        return False


if __name__ == "__main__":
    asyncio.run(simple_connectivity_test())