import * as fs from 'fs'
import { finished } from 'node:stream/promises'

import axios from 'axios'
import { SitemapStream } from 'sitemap'

// Canonical site host shared by every locale's sitemap stream.
const HOSTNAME = 'https://bittopup.com/'

/** Create a fresh sitemap stream rooted at the canonical hostname. */
const createLocaleStream = () => new SitemapStream({ hostname: HOSTNAME })

// One sitemap stream per supported locale.
const enStream = createLocaleStream()
const hiStream = createLocaleStream()
const ruStream = createLocaleStream()
const zhStream = createLocaleStream()
const zhhkStream = createLocaleStream()
const koStream = createLocaleStream()
const idStream = createLocaleStream()
const msStream = createLocaleStream()
const esStream = createLocaleStream()
const arStream = createLocaleStream()
const ptStream = createLocaleStream()
const frStream = createLocaleStream()
const jaStream = createLocaleStream()
const ngStream = createLocaleStream()

// Upstream API endpoint queried for the goods catalogue.
const BASE_URL = 'https://api.bittopup.com/naicha'
// Public site root used when composing absolute page URLs.
const BITTOPUP_BASE_URL = 'https://bittopup.com'
// Axios request timeout in milliseconds (~16.7 minutes — the catalogue fetch can be slow).
const TIMEOUT = 1000000
// NOTE(review): IS_CACHE is never read anywhere in this file — likely dead config; confirm before removing.
const IS_CACHE = false

/**
 * Compose an absolute site URL from a relative path.
 *
 * Removed the stray debug `console.log(path)` — this is a pure helper and
 * should not write to stdout on every call.
 *
 * @param {string} path - path relative to the site root, without a leading slash.
 * @returns {string} absolute URL under BITTOPUP_BASE_URL.
 */
function getUrl(path) {
  return `${BITTOPUP_BASE_URL}/${path}`
}

/**
 * Fetch `url` with the given query params and unwrap the API envelope.
 *
 * Best-effort: any request/parse failure is logged and reported as `null`
 * so callers can skip the batch instead of crashing the whole crawl.
 *
 * @param {string} url - absolute endpoint URL.
 * @param {object} [params] - query-string parameters.
 * @returns {Promise<*>} the `data.data` payload, or `null` on failure.
 */
async function getAxiosData(url, params = {}) {
  let payload = null
  try {
    const { data } = await axios.get(url, { params, timeout: TIMEOUT })
    payload = data.data
  } catch (error) {
    console.error(`Error fetching data from ${url}:`, error)
  }
  return payload
}

/**
 * Write one URL entry into each locale's sitemap stream.
 *
 * `paths[i]` is written to the i-th stream below, so this array's order must
 * stay in sync with the locale-prefix order produced by `processGoodsList`
 * (en, ru, zh, zh-hk, ko, id, hi, ms, es, ar, pt, fr, ja, ng).
 *
 * Fix: the original consumed the array with `streams.shift()`; with more
 * paths than streams that yielded `undefined` and crashed with an opaque
 * TypeError. Index pairing with an explicit guard makes the coupling visible.
 *
 * @param {string[]} paths - locale-prefixed paths, one per stream, in order.
 * @param {string} [lastmod] - last-modified date stamped on every entry
 *   (defaults to the previously hard-coded value for backward compatibility).
 */
function crawlPages(paths, lastmod = '2024-12-09') {
  const streams = [
    enStream,
    ruStream,
    zhStream,
    zhhkStream,
    koStream,
    idStream,
    hiStream,
    msStream,
    esStream,
    arStream,
    ptStream,
    frStream,
    jaStream,
    ngStream,
  ]
  if (paths.length > streams.length) {
    throw new Error(`crawlPages got ${paths.length} paths but only ${streams.length} locale streams exist`)
  }
  paths.forEach((path, index) => {
    streams[index].write({ url: path, changefreq: 'weekly', lastmod, priority: 0.7 })
  })
}

// Locale path prefixes for goods pages, in the same order as the streams
// inside `crawlPages`. Hoisted out of the loop: the original rebuilt this
// identical 14-element array on every goods item.
const LOCALE_GOODS_PREFIXES = [
  'goods',
  'ru/goods',
  'zh/goods',
  'zh-hk/goods',
  'ko/goods',
  'id/goods',
  'hi/goods',
  'ms/goods',
  'es/goods',
  'ar/goods',
  'pt/goods',
  'fr/goods',
  'ja/goods',
  'ng/goods',
]

/**
 * Fetch the full goods catalogue and emit one sitemap entry per goods page
 * per locale. Silently returns if the catalogue fetch failed (getAxiosData
 * already logged the error and returned null).
 *
 * @returns {Promise<void>}
 */
async function processGoodsList() {
  const goodsList = await getAxiosData(`${BASE_URL}/open/allGoods`)
  if (!goodsList) return

  for (const { urlName } of goodsList) {
    crawlPages(LOCALE_GOODS_PREFIXES.map((prefix) => `${prefix}/${urlName}`))
  }
}

;(async () => {
  await processGoodsList()

  // [stream, output file] pairs — replaces 28 copy-pasted pipe/end lines.
  const outputs = [
    [enStream, './public/goods-sitemap.xml'],
    [hiStream, './public/goods-sitemap-hi.xml'],
    [zhStream, './public/goods-sitemap-zh.xml'],
    [zhhkStream, './public/goods-sitemap-zh-hk.xml'],
    [ruStream, './public/goods-sitemap-ru.xml'],
    [koStream, './public/goods-sitemap-ko.xml'],
    [idStream, './public/goods-sitemap-id.xml'],
    [msStream, './public/goods-sitemap-ms.xml'],
    [esStream, './public/goods-sitemap-es.xml'],
    [arStream, './public/goods-sitemap-ar.xml'],
    [ptStream, './public/goods-sitemap-pt.xml'],
    [frStream, './public/goods-sitemap-fr.xml'],
    [jaStream, './public/goods-sitemap-ja.xml'],
    [ngStream, './public/goods-sitemap-ng.xml'],
  ]

  // Pipe each sitemap into its file, signal end-of-input, and collect a
  // promise that settles when the file stream has fully flushed.
  const writes = outputs.map(([stream, file]) => {
    const out = stream.pipe(fs.createWriteStream(file))
    stream.end()
    return finished(out)
  })

  // Fix: the original logged "completed" immediately, before any file had
  // actually finished writing. Wait for every file to flush first.
  await Promise.all(writes)
  console.log('Data crawling completed.')
})().catch((error) => {
  // Fix: the original IIFE was a floating promise — a rejection would have
  // been an unhandled rejection. Report it and signal failure to the shell.
  console.error('Sitemap generation failed:', error)
  process.exitCode = 1
})
