"""
爬取PubMed论文数据
"""
import logging
import sys
from pathlib import Path

from data_collection import PubMedCrawler
from data_collection.data_storage import DataStorage
from config import DATA_DIR

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def crawl_and_save(query: str = "", max_results: int = 1000) -> int:
    """
    Crawl PubMed papers and persist them to the database.

    Args:
        query: Search query term (empty string uses the crawler's default scope).
        max_results: Maximum number of papers to fetch.

    Returns:
        The number of papers actually saved to the database.
    """
    logger.info("开始爬取PubMed论文...")

    # Fetch papers from PubMed via the project crawler.
    crawler = PubMedCrawler()
    papers = crawler.crawl_recent_papers(query=query, max_results=max_results)

    # Persist the fetched papers; save_papers reports how many were stored
    # (presumably deduplicated rows may make this less than len(papers) — verify).
    storage = DataStorage()
    saved_count = storage.save_papers(papers)

    # Lazy %-style args: formatting is skipped if INFO is disabled.
    logger.info("成功保存 %s 篇论文到数据库", saved_count)
    logger.info("数据库位置: %s", storage.db_path)

    return saved_count


if __name__ == "__main__":
    # CLI entry point: parse arguments, then run one crawl-and-save pass.
    import argparse

    arg_parser = argparse.ArgumentParser(description="爬取PubMed论文")
    arg_parser.add_argument("--query", type=str, default="", help="搜索查询词")
    arg_parser.add_argument(
        "--max-results", type=int, default=100, help="最大结果数"
    )
    cli_args = arg_parser.parse_args()

    crawl_and_save(query=cli_args.query, max_results=cli_args.max_results)
