import { defineStore } from 'pinia'
import { ref } from 'vue'
import {
    getCrawlerRules,
    createCustomCrawlerTask,
    getCrawlerTasks,
    stopCrawlerTasks,
    deleteCrawlerTask
} from '@/api/crawler'

export const useTaskStore = defineStore('crawlerTask', () => {
    // Shared loading flag for all async actions in this store.
    const loading = ref(false)
    // Zero-based page index and page size for task-list pagination
    // (matches the Spring-style `content`/`totalElements` response below).
    const currentPage = ref(0)
    const pageSize = ref(10)
    const total = ref(0)
    // Id of the most recently created crawler task.
    const taskId = ref('')

    // Single source of truth for the form's initial shape, shared by the
    // initial state and resetForm() so the two copies cannot drift apart.
    const defaultCrawlerForm = () => ({
        name: '',
        ruleId: '',
        ruleDescription: '',
        autoPublish: true,
        crawlImages: true,
    })

    // Form data for creating a custom crawler task.
    const crawlerForm = ref(defaultCrawlerForm())

    // Crawler rule options (for the rule selector).
    const crawlerRules = ref([])

    // Active crawler tasks (current page of the task list).
    const activeTasks = ref([])

    /**
     * Run an async store action with the shared loading flag and uniform
     * error reporting. Errors are logged rather than rethrown, preserving
     * the store's original best-effort behavior — but they are no longer
     * silently swallowed.
     * @param {string} label - context prefix for the error log
     * @param {() => Promise<void>} action - the work to perform
     */
    async function runAction(label, action) {
        loading.value = true
        try {
            await action()
        } catch (error) {
            console.error(`${label}:`, error)
        } finally {
            loading.value = false
        }
    }

    // Start a crawler task from the current custom form configuration;
    // the backend returns the new task's id.
    async function createCrawlerTask() {
        await runAction('Failed to create crawler task', async () => {
            const res = await createCustomCrawlerTask(crawlerForm.value)
            taskId.value = res.data
        })
    }

    // Stop a running crawler task by id.
    // Parameter renamed from `taskId` to `id` to avoid shadowing the
    // `taskId` ref declared above.
    async function stopCrawlerTask(id) {
        await runAction('Failed to stop crawler task', async () => {
            const res = await stopCrawlerTasks(id)
            // NOTE(review): assumes the API responds with the updated
            // active-task list — confirm against the backend contract.
            activeTasks.value = res.data
        })
    }

    // Delete a crawler task by id.
    async function deleteCrawlerTasks(id) {
        await runAction('Failed to delete crawler task', async () => {
            const res = await deleteCrawlerTask(id)
            // NOTE(review): assumes the API responds with the updated
            // active-task list — confirm against the backend contract.
            activeTasks.value = res.data
        })
    }

    // Fetch the current page of crawler tasks and the total count.
    async function fetchCrawlerList() {
        await runAction('Failed to fetch crawler task list', async () => {
            const res = await getCrawlerTasks({
                page: currentPage.value,
                size: pageSize.value,
            })
            activeTasks.value = res.data.content
            total.value = res.data.totalElements
        })
    }

    // Fetch the available crawler rule options.
    async function fetchCrawlerRules() {
        await runAction('Failed to fetch crawler rules', async () => {
            const res = await getCrawlerRules()
            crawlerRules.value = res.data
        })
    }

    // Reset the creation form to its default values.
    function resetForm() {
        crawlerForm.value = defaultCrawlerForm()
    }

    return {
        crawlerForm,
        activeTasks,
        crawlerRules,
        currentPage,
        pageSize,
        total,
        loading,
        taskId,
        resetForm,
        fetchCrawlerList,
        createCrawlerTask,
        stopCrawlerTask,
        fetchCrawlerRules,
        deleteCrawlerTasks,
    }
})