const rewire = require('rewire')
const cheerio = require('cheerio')
const fs = require('fs')
const R = require('ramda')
const moment = require('moment')
const axios = require('axios')
const {htmlToMarkdown} = rewire('../common/markdown.cjs')
const utils = rewire('../common/utils.cjs')
const {pageListSpider} = rewire('../common/spider.cjs')
const {transByTencentApi, cutTrans, transByBaiduApi, trans} = rewire('../common/trans.cjs')
const file = rewire('../common/fileRemote.cjs')
const dbutils = rewire('../common/dbutils.cjs')
const combinator = rewire('../common/combinator.cjs')
const config = rewire('../config.cjs')
const state = rewire('../state.cjs')
const { PrismaClient, Prisma } = require('@prisma/client')

const prisma = new PrismaClient()

// https://www.digitalocean.com/community/tutorials
async function saveArticleInfo(info) {
    await prisma.article.create({
        data: info
    })
}



// Spider over the DigitalOcean Algolia index: list pages → new URLs →
// fetch article HTML → parse → persist. Callback order/semantics are
// defined by pageListSpider in ../common/spider.cjs.
let getPageData = pageListSpider(
    // Fetch one page (20 hits) of the Algolia "newest tutorials" index.
    async function(page) {
	console.log(`第${page}页`)
	const resp = await utils.httpPost(
	    'https://6zheuvkj88-3.algolianet.com/1/indexes/production_community_consolidated_newest/query?x-algolia-agent=Algolia%20for%20vanilla%20JavaScript%203.20.3&x-algolia-application-id=6ZHEUVKJ88&x-algolia-api-key=c5470567eae7fa1177d43222e18ba086',
	    {"params":`query=&page=${page}&hitsPerPage=20&facetFilters=%5B%22item_type%3Atutorial%22%2C%22item_subtype%3A-tech_talk%22%5D&numericFilters=%5B%5D`}
	)
	return resp.data
    },
    // Keep paginating while the page has hits; 'break' ends the crawl.
    async function(data) {
	if (!R.isEmpty(data['hits']) && R.length(data['hits']) > 0) {
	    return true
	}
	else {
	    console.log(data)
	    return 'break'
	}
    },
    // Keep only URLs not already present in the `article` table.
    async function(data) {
	const urls = await dbutils.notInTable(
	    state.getDb(),
	    'article',
	    'source_url',
	    data['hits'].map(item => `https://www.digitalocean.com/community/${item['feedable_path']}`)
	)
	return urls
    },
    1,
    // Download the article page.
    async function(url) {
	console.info(`正在访问:${url}`)
	const resp = await utils.httpGet(url)
	return resp.data
    },
    // Skip non-tutorial pages; 'continue' moves on to the next URL.
    async function(html) {
	// Native String#includes replaces the deprecated R.contains.
	if (html.includes(`class="tutorial-footer"`)) {
	    return true
	}
	else {
	    return 'continue'
	}
    },
    // Parse the tutorial page and persist its metadata.
    async function(html, url) {
	const $ = cheerio.load(html)
	const tags = JSON.stringify($('.meta-section.tags > a.tag').map(function(i, el) { return $(el).text().trim() }).get())
	const title = $('.content-title').text().trim()
	const publicTime = moment($('.tutorial-date').html(), 'MMMM DD, YYYY').valueOf()
	const description = $(`meta[name=description]`).attr('content')
	// BUG FIX: trans() is async — without await a pending Promise was
	// stored in description_zh instead of the translated string.
	const descriptionZh = await trans(description, 'zh')

	const imgPath = '/media/' + utils.uuid() + '.png'
	// BUG FIX: attribute values containing ':' must be quoted, otherwise
	// the CSS selector parser rejects `og:image:url`.
	const topImg = await file.downloadAndUpload($(`meta[property="og:image:url"]`).attr('content'), imgPath)

	await saveArticleInfo({
	    id: utils.uuid(),
	    title,
	    content_text: '',
	    public_time: publicTime,
	    ctime: utils.currentMs(),
	    html,
	    source_url: url,
	    trans: 0,
	    description_zh: descriptionZh,
	    topImg, description, tags
	})
    }
)



// Export every stored DigitalOcean article as a translated markdown file
// (front matter + translated body) under the blog_site directory.
async function dbToMdTask() {
    const data = await readFromDb()
    for (let info of data) {
	const content = info['content_text']
	const contentMd = await transMD(content)
	const metaData = createMetaData(info)
	const md = metaData + '\n' + contentMd
	// replaceAll: `.replace(' ', '_')` only converted the FIRST space.
	const path = `e:/project/blog_site/` + info['title'].replaceAll(' ', '_') + '.md'
	// BUG FIX: original passed undefined `contentText` (ReferenceError);
	// the assembled `md` is what must be written out.
	saveFile(md, path)
    }
}

// Fetch all article rows that were scraped from the DigitalOcean
// community site.
async function readFromDb() {
    // BUG FIX: a Prisma string filter must name its column — a bare
    // `{startsWith: ...}` is not a valid `where` clause.
    const data = await prisma.article.findMany({
	where: {
	    source_url: { startsWith: 'https://www.digitalocean.com/community' }
	}
    })
    return data
}

/**
 * Build the Hexo-style YAML front-matter block for one article row.
 * @param {object} info - article row with `title`, `public_time` (ms epoch),
 *   `tags` (JSON-encoded string array), `description_zh`, `topImg`.
 * @returns {string} front matter delimited by `---` lines.
 */
function createMetaData(info) {
    // Parse once — the original JSON.parse'd the same string twice.
    const tagLines = JSON.parse(info['tags']).map(tag => `- ${tag}\n`).join('')
    let rs = ''
    rs += `---\n`
    rs += `title: ${info['title']}\n`
    rs += `date: ${moment(info['public_time']).format('YYYY-MM-DD')}\n`
    rs += `tags: \n` + tagLines
    rs += `categories: 翻译\n`
    rs += `keywords: \n` + tagLines
    rs += 'description: ' + info['description_zh'] + '\n'
    rs += `top_img: ${info['topImg']}\n`
    rs += '---\n'
    return rs
}


function createRegExer(md, regList) {
    let txtList = []
    let i = 0
    let text = ''

    function regReplace(str, reg) {
	const exeRs = str.match(reg)
	if (exeRs) {
	    txtList.push(exeRs[1])
	    const s = str.replace(reg, `[[${i}]]`)
	    i = i + 1
	    return regReplace(s, reg)
	}
	return str
    }

    function replaceText() {
	let index = 0
	let str = md
	for (let reg of regList) {
	    str = regReplace(str, reg)
	}
	text = str
	return text
    }
    function backText(rtxt) {
	let rs = rtxt
	for (let i = 0; i < txtList.length; i++) {
	    rs = rs.replace(`[[${i}]]`, txtList[i])
	}
	return rs
    }
    function backList(ls) {
	let rs = text
	for (let i = 0; i < ls.length; i++) {
	    rs = rs.replace(`[[${i}]]`, ls[i])
	}
	return rs
    }
    
    function getTextList() {
	return txtList
    }
    
    return {replaceText, backText, getTextList, backList}
}


/**
 * Wrap a plain-text translator so it can translate markdown safely:
 * fenced code, inline code, images, links and heading markers are
 * replaced by placeholders before translation and restored afterwards.
 * @param {(text: string) => Promise<string>} trans - async translator.
 * @returns {(md: string) => Promise<string>} markdown-aware translator.
 */
function markdownTrans(trans) {
    async function theFn(md) {
	// BUG FIX: [\s\S] also matches '\r' — the original `(?:.|\n)` left
	// fenced/inline code unprotected in CRLF documents.
	const fencedCode = /(```[\s\S]*?```)/
	const inlineCode = /(`[\s\S]*?`)/
	const image = /(\!\[.*?\]\(.*?\))/
	const link = /(\[.*?\]\(.*?\))/
	const heading = /(\#+ )/
	const regExer = createRegExer(md, [fencedCode, inlineCode, image, link, heading])
	const text = regExer.replaceText()
	const rtxt = await trans(text)
	const rs = regExer.backText(rtxt)
	return rs
    }
    return theFn
}


// Mirror every `![alt](url)` image in `md` to our own media store and
// rewrite the markdown to point at the mirrored copy.
async function replaceImages(md) {
    // Capture the whole image token so the alt text can be preserved —
    // the original captured only the URL and reused it as the alt text.
    const reg = /(\!\[.*?\]\(.*?\))/
    const exer = createRegExer(md, [reg])
    exer.replaceText()
    const tokens = exer.getTextList()
    const rebuilt = await combinator.asyncMap(async function(token) {
	const m = token.match(/\!\[(.*?)\]\((.*?)\)/)
	const path = '/media/' + utils.uuid() + '.png'
	const newUrl = await file.downloadAndUpload(m[2], path)
	return `![${m[1]}](${newUrl})`
    }, tokens)
    return exer.backList(rebuilt)
}

// Convert a scraped HTML article body to Chinese markdown: html → md,
// re-host images, then translate en→zh in ≤3000-char chunks via Tencent.
async function transMD(html) {
    const mdtrans = markdownTrans(cutTrans(R.partialRight(transByTencentApi, ['en', 'zh']), 3000))
    // BUG FIX: replaceImages is async — without await the translator was
    // handed a pending Promise instead of the markdown string.
    const md = await replaceImages(htmlToMarkdown(html.replaceAll('&nbsp;', '')))
    const rs = await mdtrans(md)
    return rs
}



// Crawl Algolia result pages 1–2. BUG FIX: the promise was floating —
// any failure became an unhandled rejection; surface it instead.
Promise.resolve(getPageData(1, 2)).catch(err => console.error(err))
//transMD(file.readFile('e:/www.html'))