// Crawler API service
import { apiClient } from './apiClient';

// Crawler job as returned by the backend.
export interface CrawlerJob {
  id: number;
  // Public job identifier used for status lookups (see getCrawlerJob).
  job_id: string;
  name: string;
  description: string;
  crawler_type: string;
  start_urls: string[];
  settings: Record<string, any>;
  actions: any[];
  status: string;
  result: any;
  error_message: string;
  user_id: number;
  // Timestamp strings — presumably ISO-8601; confirm against the backend schema.
  created_at: string;
  updated_at: string;
  started_at: string;
  completed_at: string;
}

// Scheduled (recurring) crawler task as returned by the backend.
export interface ScheduledCrawlerTask {
  id: number;
  name: string;
  description: string;
  // Crawler configuration executed on each scheduled run.
  task_config: Record<string, any>;
  // Cron expression controlling the run schedule.
  cron_expression: string;
  enabled: boolean;
  user_id: number;
  // Timestamp strings — presumably ISO-8601; confirm against the backend schema.
  created_at: string;
  updated_at: string;
  last_run_at: string;
  next_run_at: string;
}

// Request payload for creating a crawler job (POST /crawler/jobs).
export interface CreateCrawlerJobRequest {
  name: string;
  description: string;
  crawler_type: string;
  start_urls: string[];
  settings?: Record<string, any>;
  actions?: any[];
}

// Request payload for creating a scheduled task (POST /crawler/scheduled-tasks).
export interface CreateScheduledTaskRequest {
  name: string;
  description: string;
  task_config: Record<string, any>;
  cron_expression: string;
  enabled: boolean;
}

// Scrapyd server status response.
export interface ScrapydStatus {
  status: string;
  running: string;
  node_name: string;
  // Index signature: the Scrapyd response may carry extra fields not modeled here.
  [key: string]: any;
}

// Scrapyd project-list response.
export interface ScrapydProjects {
  status: string;
  projects: string[];
}

// Scrapyd spider-list response for a single project.
export interface ScrapydSpiders {
  status: string;
  spiders: string[];
}

// Result of scheduling a Scrapyd job.
export interface ScheduleScrapydJobResult {
  status: string;
  // Present on success: the id of the scheduled job.
  jobid?: string;
  // Present on failure: a human-readable error message.
  message?: string;
}

// Scrapyd job-list response, grouped by lifecycle state.
export interface ScrapydJobs {
  status: string;
  pending: any[];
  running: any[];
  finished: any[];
}

/**
 * Thin API wrapper for crawler-job management and Scrapyd server operations.
 *
 * Every method delegates to the shared `apiClient`; all endpoints are rooted
 * at `/crawler`.
 */
class CrawlerApi {
  /** Create a new crawler job. */
  async createCrawlerJob(data: CreateCrawlerJobRequest): Promise<CrawlerJob> {
    return apiClient.post<CrawlerJob>('/crawler/jobs', data);
  }

  /** Fetch a single crawler job (including its status) by job id. */
  async getCrawlerJob(jobId: string): Promise<CrawlerJob> {
    // Encode the id so special characters ('/', '?', '#', spaces…) cannot
    // break the URL path.
    return apiClient.get<CrawlerJob>(`/crawler/jobs/${encodeURIComponent(jobId)}`);
  }

  /** List the current user's crawler jobs, paginated via skip/limit. */
  async getCrawlerJobs(skip: number = 0, limit: number = 100): Promise<CrawlerJob[]> {
    return apiClient.get<CrawlerJob[]>('/crawler/jobs', { skip, limit });
  }

  /** Create a scheduled (cron-based) crawler task. */
  async createScheduledTask(data: CreateScheduledTaskRequest): Promise<ScheduledCrawlerTask> {
    return apiClient.post<ScheduledCrawlerTask>('/crawler/scheduled-tasks', data);
  }

  /** List the current user's scheduled crawler tasks, paginated via skip/limit. */
  async getScheduledTasks(skip: number = 0, limit: number = 100): Promise<ScheduledCrawlerTask[]> {
    return apiClient.get<ScheduledCrawlerTask[]>('/crawler/scheduled-tasks', { skip, limit });
  }

  /** Get the Scrapyd server status. */
  async getScrapydStatus(): Promise<ScrapydStatus> {
    return apiClient.get<ScrapydStatus>('/crawler/scrapyd/status');
  }

  /** List projects deployed on the Scrapyd server. */
  async getScrapydProjects(): Promise<ScrapydProjects> {
    return apiClient.get<ScrapydProjects>('/crawler/scrapyd/projects');
  }

  /** List the spiders available in a Scrapyd project. */
  async getScrapydSpiders(projectName: string): Promise<ScrapydSpiders> {
    // Encode the project name so it is safe as a URL path segment.
    return apiClient.get<ScrapydSpiders>(`/crawler/scrapyd/spiders/${encodeURIComponent(projectName)}`);
  }

  /**
   * Schedule a Scrapyd spider run.
   *
   * Note: the backend expects `project_name` and `spider_name` as query
   * parameters on a POST request; optional spider settings travel in the
   * POST body.
   */
  async scheduleScrapydJob(
    projectName: string,
    spiderName: string,
    settings?: Record<string, any>
  ): Promise<ScheduleScrapydJobResult> {
    // URLSearchParams percent-encodes values, so names containing special
    // characters are safe in the query string.
    const params = new URLSearchParams({
      project_name: projectName,
      spider_name: spiderName,
    });
    // Build the URL once instead of duplicating it per branch.
    const url = `/crawler/scrapyd/jobs/schedule?${params.toString()}`;
    return settings
      ? apiClient.post<ScheduleScrapydJobResult>(url, { settings })
      : apiClient.post<ScheduleScrapydJobResult>(url);
  }

  /** List Scrapyd jobs, optionally filtered to a single project. */
  async getScrapydJobs(projectName?: string): Promise<ScrapydJobs> {
    const params: Record<string, string> = {};
    if (projectName) {
      params.project_name = projectName;
    }
    return apiClient.get<ScrapydJobs>('/crawler/scrapyd/jobs', params);
  }
}

// Shared singleton instance used throughout the application.
export const crawlerApi = new CrawlerApi();