import axios from 'axios'
import * as fs from 'fs'
import { SitemapStream } from 'sitemap'

// Sitemap stream that every crawled path gets written into (see getUrl).
const smStream = new SitemapStream({ hostname: 'https://bittopup.com/' })

const BASE_URL = 'https://api.bittopup.com/naicha' // backend API root
const BITTOPUP_BASE_URL = 'https://bittopup.com' // public site root used to build page URLs
const TIMEOUT = 1000000 // axios timeout in ms — deliberately generous
const PAGINATION_LIMIT = 55 // items per category page, used to compute page count
const WITH_OUT_SITEMAP = false // true = do not write sitemap entries while crawling
const JUST_SITEMAP = true // true = only record URLs; false = also issue real HTTP requests
let IS_STOP = false // set to false to disable the resume-from-interruption logic entirely
const STOP_PATH = 'goods/Conquer-Points' // if a previous run was interrupted, resume from this path

// Locale URL prefixes used to build per-language paths; '' is the default
// (English) locale with no prefix.
// The previous version built objects carrying code/file/reqCode metadata and
// immediately threw everything away via .map(({ value }) => value) — only the
// prefixes were ever used, so the list is written out directly.
const LANGUAGES_I18N = ['', 'zh', 'id', 'ko', 'ru']
/**
 * Records `path` in the sitemap (unless sitemap output is disabled) and
 * returns the absolute public URL for it.
 * @param {string} path - site-relative path, no leading slash
 * @returns {string} absolute URL on BITTOPUP_BASE_URL
 */
function getUrl(path) {
  console.log(path)
  const sitemapEnabled = !WITH_OUT_SITEMAP
  if (sitemapEnabled) {
    smStream.write({ url: path, changefreq: 'weekly', lastmod: '2024-12-09' })
  }
  return [BITTOPUP_BASE_URL, path].join('/')
}

/**
 * GET `url` with query `params` and unwrap the API envelope.
 * @param {string} url
 * @param {object} [params] - query-string parameters
 * @returns {Promise<*>} the response body's `.data.data`, or null on any
 *   request failure (the error is logged, never rethrown)
 */
async function getAxiosData(url, params = {}) {
  let payload = null
  try {
    const res = await axios.get(url, { params, timeout: TIMEOUT })
    payload = res.data.data
  } catch (err) {
    console.error(`Error fetching data from ${url}:`, err)
  }
  return payload
}

/**
 * Registers every path in `paths` with the sitemap and, when JUST_SITEMAP is
 * false, also issues a real HTTP request to each page. While resuming from an
 * interrupted run, whole batches are skipped until the one containing
 * STOP_PATH shows up.
 * @param {string[]} paths
 */
async function crawlPages(paths) {
  // Resume support: stay in skip mode until STOP_PATH appears in this batch.
  if (IS_STOP && STOP_PATH) {
    IS_STOP = !paths.includes(STOP_PATH)
  }
  if (IS_STOP) return

  // Real crawl vs. sitemap-only run.
  let tasks
  if (JUST_SITEMAP) {
    tasks = paths.map((p) => getUrl(p))
  } else {
    tasks = paths.map((p) =>
      axios.get(getUrl(p), { timeout: TIMEOUT }).catch((e) => {
        console.log(e)
      }),
    )
  }
  return Promise.allSettled(tasks)
}

/**
 * Crawls the detail page of every good under parent category 5, once per
 * configured language prefix. Goods without a urlName slug are skipped.
 */
async function processGoodsList() {
  const goods = await getAxiosData(`${BASE_URL}/open/allGoods?parentCategoryId=5`)
  if (!goods) return

  for (const good of goods) {
    const { urlName } = good
    if (!urlName) continue
    const localizedPaths = LANGUAGES_I18N.map((lang) =>
      lang ? `${lang}/goods/${urlName}` : `goods/${urlName}`,
    )
    await crawlPages(localizedPaths)
  }
}

/**
 * Crawls every news article page, once per configured language prefix.
 *
 * Fixes: the previous version destructured `data` straight off the
 * getAxiosData() result, which is null when the request fails — crashing with
 * a TypeError instead of returning gracefully like the sibling process*
 * functions. It also crawled `article/undefined` for entries missing a
 * urlName; those are now skipped, matching processGoodsList.
 */
async function processArticleList() {
  const result = await getAxiosData(`${BASE_URL}/app/news/newsByCategory/1/9999999`)
  const articleList = result?.data
  if (!articleList) return

  for (const { urlName } of articleList) {
    if (!urlName) continue
    const paths = LANGUAGES_I18N.map((lang) => (lang ? `${lang}/article` : 'article')).map(
      (suffix) => `${suffix}/${urlName}`,
    )
    await crawlPages(paths)
  }
}

/**
 * Walks the category tree: crawls each top-level category, then each of its
 * subcategories (subcategory pages are addressed as parentId/subId).
 *
 * Fix: a category whose `subCategories` is missing/null no longer crashes the
 * inner for...of loop — it simply has no subcategory pages to crawl.
 */
async function processCategories() {
  const categories = await getAxiosData(`${BASE_URL}/app/goods/categoryTree`)
  if (!categories) return

  for (const { id, subCategories } of categories) {
    await processCategory(id, true)
    for (const { id: subId } of subCategories ?? []) {
      await processCategory(id, false, subId)
    }
  }
}

/**
 * Crawls every paginated listing page of one category (or one subcategory of
 * it, when `subCategoryId` is given). Fetches page 1/1 first just to learn
 * `totalElements`, then derives the page count from PAGINATION_LIMIT.
 *
 * Fixes:
 * - getAxiosData() returns null on failure; destructuring `totalElements`
 *   off it crashed with a TypeError. Now guarded.
 * - The default locale ('') used to produce paths with a leading slash
 *   ('/category/...'), yielding double-slash URLs like
 *   'https://bittopup.com//category/...'. It is now treated the same way as
 *   in the other process* functions (no prefix at all).
 *
 * @param {number|string} categoryId - parent category id (always in the path)
 * @param {boolean} isParentCategory - sent to the API as 1/0
 * @param {number|string|null} [subCategoryId] - when set, overrides the id
 *   sent to the API and adds a path segment
 */
async function processCategory(categoryId, isParentCategory, subCategoryId = null) {
  const page = await getAxiosData(`${BASE_URL}/app/goods/getGoodsByCategoryId/1/1`, {
    categoryId: subCategoryId || categoryId,
    isParentCategory: isParentCategory ? 1 : 0,
  })
  const totalElements = page?.totalElements
  if (!totalElements) return

  const pages = Math.ceil(totalElements / PAGINATION_LIMIT)
  for (let i = 0; i < pages; i++) {
    const path = subCategoryId
      ? `category/${categoryId}/${subCategoryId}/${i + 1}`
      : `category/${categoryId}/${i + 1}`
    const paths = LANGUAGES_I18N.map((lang) => (lang ? `${lang}/${path}` : path))
    await crawlPages(paths)
  }
}
/**
 * Crawls the reviews page of every good (isPure=1 list), once per configured
 * language prefix. Goods without a urlName slug are skipped.
 */
async function processGoodsReviewsList() {
  const goods = await getAxiosData(`${BASE_URL}/open/allGoods?isPure=1`)
  if (!goods) return

  for (const good of goods) {
    const { urlName } = good
    if (!urlName) continue
    const reviewPaths = LANGUAGES_I18N.map((lang) =>
      lang ? `${lang}/reviews/${urlName}` : `reviews/${urlName}`,
    )
    await crawlPages(reviewPaths)
  }
}
;(async () => {
  // Entry point. Each process* call crawls one section of the site; only the
  // article crawl is currently enabled — uncomment the others as needed.
  // await crawlPages(LANGUAGES_I18N) // For the home pages
  // await processCategories()
  await processArticleList()
  // await processGoodsReviewsList()
  if (!WITH_OUT_SITEMAP) {
    // Piping after all writes relies on SitemapStream buffering entries until
    // a destination is attached; end() then flushes them to the file.
    // NOTE(review): completion is logged before the file stream's 'finish'
    // event — if exact ordering matters, await the stream finishing first.
    smStream.pipe(fs.createWriteStream('./public/article-sitemap.xml'))
    smStream.end()
  }
  console.log('Data crawling completed.')
})()
