package com.zhava.crawler.application.service;

import com.zhava.crawler.client.api.CrawlerApi;
import com.zhava.crawler.client.request.CrawlerRequest;
import com.zhava.crawler.client.request.PaginatedCrawlerRequest;
import com.zhava.crawler.client.response.CrawlerResponse;
import com.zhava.crawler.client.response.PaginatedCrawlerResponse;
import com.zhava.crawler.domain.enums.OutputFormatEnum;
import com.zhava.crawler.domain.gateway.CrawlerGateway;
import com.zhava.crawler.domain.model.Crawler;
import com.zhava.crawler.domain.model.PaginatedCrawler;
import com.zhava.crawler.infrastructure.mapper.CrawlerMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

/**
 * Crawler service API implementation.
 *
 * @author zhaxiang
 */
@Service
public class CrawlerApiImpl implements CrawlerApi {

    // Gateway that performs the actual crawling work; injected once, never reassigned.
    private final CrawlerGateway crawlerGateway;

    /**
     * Constructor injection (preferred over field injection): the dependency is
     * final, the bean fails fast if it is missing, and the class is unit-testable
     * without a Spring context. Spring resolves the single constructor automatically.
     *
     * @param crawlerGateway gateway executing crawl operations
     */
    @Autowired
    public CrawlerApiImpl(CrawlerGateway crawlerGateway) {
        this.crawlerGateway = crawlerGateway;
    }

    /**
     * Executes a single crawl for the given request.
     *
     * <p>Validates the source URL, maps the request to the domain model, delegates
     * crawling to the gateway, transforms the extracted data into the requested
     * output format, and maps the result back to a response.
     *
     * @param request crawl request; a {@code null} request or a missing/empty URL
     *                yields a 400 error response instead of an exception
     * @return crawl response (status 400 with an error message on invalid input)
     */
    @Override
    public CrawlerResponse crawl(CrawlerRequest request) {
        // Guard against a null request as well as a missing URL — previously a
        // null request caused a NullPointerException instead of a 400 response.
        if (request == null || request.getSourceUrl() == null || request.getSourceUrl().isEmpty()) {
            return createErrorResponse("URL不能为空");
        }

        // Map the transport request onto the domain model via MapStruct.
        Crawler crawler = CrawlerMapper.INSTANCE.requestToCrawler(request);

        // Delegate the actual crawl to the gateway.
        crawler = crawlerGateway.executeCrawling(crawler);

        // Transform extracted data into the requested output format.
        // NOTE(review): getOutputFormat() may be null here — assumed the gateway
        // tolerates a null format; confirm against CrawlerGateway.transformOutput.
        if (crawler.getExtractedData() != null) {
            Object transformedData = crawlerGateway.transformOutput(crawler, crawler.getOutputFormat());
            crawler.setExtractedData(transformedData);
        }

        // Map the domain model back to the transport response via MapStruct.
        return CrawlerMapper.INSTANCE.crawlerToResponse(crawler);
    }

    /**
     * Executes a paginated crawl for the given request.
     *
     * <p>Validates the URL, the page-parameter name, and the page range, then maps
     * the request to the domain model, delegates to the gateway, and maps the
     * result back to a response.
     *
     * @param request paginated crawl request; invalid input yields a 400 error
     *                response instead of an exception
     * @return paginated crawl response (status 400 with an error message on invalid input)
     */
    @Override
    public PaginatedCrawlerResponse crawlPaginated(PaginatedCrawlerRequest request) {
        // Guard against a null request as well as a missing URL — previously a
        // null request caused a NullPointerException instead of a 400 response.
        if (request == null || request.getSourceUrl() == null || request.getSourceUrl().isEmpty()) {
            return createPaginatedErrorResponse("URL不能为空");
        }

        if (request.getPageParameterName() == null || request.getPageParameterName().isEmpty()) {
            return createPaginatedErrorResponse("页码参数名不能为空");
        }

        // Reject inverted page ranges up front.
        if (request.getStartPage() > request.getEndPage()) {
            return createPaginatedErrorResponse("起始页码不能大于结束页码");
        }

        // Map the transport request onto the domain model via MapStruct.
        PaginatedCrawler crawler = CrawlerMapper.INSTANCE.requestToPaginatedCrawler(request);

        // Delegate the paginated crawl to the gateway.
        crawler = crawlerGateway.executePaginatedCrawling(crawler);

        // Map the domain model back to the transport response via MapStruct.
        return CrawlerMapper.INSTANCE.paginatedCrawlerToResponse(crawler);
    }

    /**
     * Builds a 400 error response for a single-crawl request.
     *
     * @param errorMessage human-readable validation failure message
     * @return crawler response carrying status 400 and the message
     */
    private CrawlerResponse createErrorResponse(String errorMessage) {
        CrawlerResponse response = new CrawlerResponse();
        response.setStatusCode(400);
        response.setErrorMessage(errorMessage);
        return response;
    }

    /**
     * Builds a 400 error response for a paginated-crawl request.
     *
     * @param errorMessage human-readable validation failure message
     * @return paginated crawler response carrying status 400 and the message
     */
    private PaginatedCrawlerResponse createPaginatedErrorResponse(String errorMessage) {
        PaginatedCrawlerResponse response = new PaginatedCrawlerResponse();
        response.setStatusCode(400);
        response.setErrorMessage(errorMessage);
        return response;
    }
}