const cheerio = require('cheerio')
const rewire = require('rewire')
const {saveFile} = rewire('../common/file.cjs')
const {uuid, arraySub, sleep} = rewire('../common/utils.cjs')
const {composeAsync, collectParamsAndResult, parallelMap, logParams, errorRetry, whenNoError} = rewire('../common/combinator.cjs')
const {makeFetcher, header, baseURL, wrapCache} = rewire('../common/fetch.cjs')
const { PrismaClient, Prisma } = require('@prisma/client')
const R = require('ramda')

const prisma = new PrismaClient()

/**
 * Build the request headers for vedeng.com.
 * The Hm_lvt/Hm_lpvt cookie values are visit timestamps (seconds) derived
 * from the current time so the analytics cookies always look fresh.
 * @returns {{'User-Agent': string, Cookie: string}} headers object
 */
function getHeaders() {
    const time2 = Math.floor(new Date().getTime() / 1000)
    return {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/115.0',
        "Cookie": 'track_cookieID=425127203051831296; lfapp_utms=%7B%22medium%22%3A%22organic%22%2C%22source%22%3A%22bing%22%7D; Hm_lvt_da6a4bb9d79eebd389c88e4c2cdce536=' + time2 + ',' + (time2 + 36000) + '; isScreenType=true; initAddress=%7B%22provinceId%22%3A3200100%2C%22provinceName%22%3A%22%E6%B1%9F%E8%8B%8F%E7%9C%81%22%2C%22cityId%22%3A320100%2C%22cityName%22%3A%22%E5%8D%97%E4%BA%AC%E5%B8%82%22%7D; Hm_lpvt_da6a4bb9d79eebd389c88e4c2cdce536=' + (time2 + 3600)
    }
}

// Build a vedeng.com fetcher for the given HTTP method, bound to the
// site base URL and the per-request anti-bot headers from getHeaders().
const makeVedengFetcher = (method) => baseURL(
    header(makeFetcher(method), getHeaders),
    "https://www.vedeng.com"
)

// `let`: validHtml() rebuilds the GET fetcher after hitting the captcha wall.
let vedengFetch = makeVedengFetcher('get')
let vedengFetchPost = makeVedengFetcher('post')

/**
 * Cached, retrying page fetch; the single parameter is the URL and the
 * result is exposed as {url, fetchResult}.
 *
 * BUGFIX: the original passed `vedengFetch` directly, capturing the
 * fetcher reference at module-load time — so the fetcher rebuilt by
 * validHtml() after a captcha was never actually used. The wrapper
 * lambda dereferences `vedengFetch` at call time instead.
 */
const fetch = collectParamsAndResult(
        wrapCache(errorRetry(
            composeAsync(
                logParams((...args) => vedengFetch(...args)),
                validHtml
            ), 100, null, 10000   // retry parameters — see errorRetry in combinator.cjs
        ), 'https://www.vedeng.com', null, async data => await sleep(2500)),
        ['url', 'fetchResult']
    )

/**
 * Check a fetched page for the anti-bot captcha wall.
 * Passes `data` straight through when the page is normal; otherwise
 * rebuilds the GET fetcher and throws so the retry logic can re-attempt
 * once the captcha has been handled.
 * @param data fetch result whose `.data` holds the HTML string
 * @returns the same data object when the page is not a captcha page
 * @throws {Error} when the page is a captcha challenge
 */
function validHtml(data) {
    const html = data.data
    // "验证码登录" (log in with a code) is a normal login widget, so only a
    // bare "验证码" hit signals the actual captcha challenge.
    const captchaWall = html.includes("验证码") && !html.includes("验证码登录")
    if (!captchaWall) {
        return data
    }
    vedengFetch = baseURL(
        header(makeFetcher('get'), getHeaders),
        "https://www.vedeng.com"
    )
    throw new Error("请处理验证码!!!")
}

/**
 * Fetch the full category tree and return listing-page paths for every
 * second- and third-level category.
 * @returns {Promise<string[]>} paths of the form "/c-<vCategoryId>.html"
 */
async function getAllCategory() {
    const url = "https://www.vedeng.com/api/pc/category/getnewsearchCategory"

    const fetch = collectParamsAndResult(
        logParams(vedengFetchPost),
        ['url', 'fetchResult']
    )

    const {fetchResult} = await fetch(url)
    const json = fetchResult.data.data
    const result = []
    // Walk the three-level tree. The original index loops used implicit
    // globals (i/j/k without declarations) — a latent strict-mode bug.
    for (const p1 of json) {
        for (const p2 of p1['childCategoryList'] || []) {
            result.push(p2['vCategoryId'])
            for (const p3 of p2['childCategoryList'] || []) {
                result.push(p3['vCategoryId'])
            }
        }
    }
    return R.map(item => "/c-" + item + ".html", result)
}

/**
 * Verify that a fetched page is a genuine product-detail page.
 * Detail pages carry a "gdt-photo" gallery marker; anything else
 * (error page, listing, captcha) is rejected.
 * @param data {url, fetchResult} produced by fetch()
 * @returns the same data object when the check passes
 * @throws {Error} when the marker is absent
 */
function checkDetail(data) {
    const isDetailPage = data.fetchResult.data.includes('gdt-photo')
    if (!isDetailPage) {
        throw new Error("check detail fail!")
    }
    return data
}

/**
 * Persist one crawled page into the `beideng` table with a fresh id.
 * @param {{url: string, fetchResult: {data: string}}} data
 */
async function saveData(data) {
    const row = {
        id: uuid(),
        url: data.url,
        source: data.fetchResult.data
    }
    await prisma.beideng.create({data: row})
}

/**
 * Extract the product-detail URLs from a single listing page.
 * @param {{fetchResult: {data: string}}} data fetched listing page
 * @returns {Promise<string[]>} hrefs of the goods links
 */
async function parseDetailUrl(data) {
    const $ = cheerio.load(data.fetchResult.data)
    return htmlAttrs($, '.chu-box a.img-goods', 'href')
}


/**
 * Collect detail URLs from a listing page AND all of its pagination
 * pages, then keep only the ones not yet stored in the database.
 * @param {{fetchResult: {data: string}}} data fetched first listing page
 * @returns {Promise<string[]>} detail URLs that still need crawling
 */
async function parseAllDetailUrl(data) {
    console.log("正在解析")
    const $ = cheerio.load(data.fetchResult.data)

    // Detail URLs on the current page.
    const urls = htmlAttrs($, '.chu-box a.img-goods', 'href')

    // Follow every real pagination link (skip javascript: placeholders)
    // and parse each page's detail URLs, one page at a time.
    const urlPages = htmlAttrs($, '.page-num a', 'href').filter(url => !url.includes('javascript:'))
    const fn = composeAsync(fetch, parseDetailUrl)
    const urlsArr = await parallelMap(1, fn, urlPages)

    // BUGFIX: Ramda's concat is binary, so R.concat(urls, ...urlsArr)
    // silently dropped every pagination page after the first. Native
    // Array.prototype.concat is variadic and handles the empty case too.
    const allUrls = urls.concat(...urlsArr)
    console.log("解析结果:" + allUrls)
    return filterUnFetch(allUrls)
}

/**
 * Keep only the URLs that are not yet present in the beideng table.
 * @param {string[]} urls candidate detail URLs
 * @returns {Promise<string[]>} the subset that still needs fetching
 */
async function filterUnFetch(urls) {
    if (urls.length === 0) {
        return []
    }
    // Prisma.join builds a parameterized IN-list for the raw query.
    const rows = await prisma.$queryRaw`SELECT url FROM beideng WHERE url IN (${Prisma.join(urls)})`
    const existing = rows.map(row => row['url'])
    return arraySub(urls, existing)
}


/**
 * Collect one attribute from every element matched by a selector.
 * @param $ cheerio root (or any compatible selection function)
 * @param {string} css selector string
 * @param {string} attr attribute name to read
 * @returns {string[]} attribute values in document order
 */
function htmlAttrs($, css, attr) {
    const values = []
    $(css).each((index, elem) => {
        values.push($(elem).attr(attr))
    })
    return values
}

/**
 * Debug helper: dump the fetched HTML to a fixed local file and pass the
 * data through unchanged so it can sit inside a compose pipeline.
 * NOTE(review): saveFile's return value is not awaited — if it is async
 * the write may still be in flight when this resolves; confirm in
 * common/file.cjs.
 * @param {{fetchResult: {data: string}}} data fetch result to dump
 * @returns the same data object
 */
async function saveDetailFile(data) {
    const {fetchResult} = data
    saveFile("e:/ss.txt", fetchResult.data)
    return data
}

// Pipeline for one product page: fetch it, verify it is a real detail
// page (checkDetail), then persist it (saveData). Failures in the
// check/save step are logged via console.log rather than thrown.
const fetchDetailAndSave = composeAsync(
    fetch,
    whenNoError(checkDetail, saveData, console.log)
)

// Pipeline for one category listing: fetch the listing page, resolve all
// not-yet-crawled detail URLs (including pagination), then fetch and save
// each detail page sequentially (parallelism of 1).
const fetchList = composeAsync(
    fetch,
    parseAllDetailUrl,
    R.partial(parallelMap, [1, fetchDetailAndSave])
)

/**
 * Post-process stored pages: load up to 1000 rows that have not been
 * parsed yet (approvalId is null), extract the product fields from the
 * saved HTML, and write them back onto the same row.
 */
async function parseDetail() {
    const dbdata = await prisma.$queryRaw`SELECT id,source FROM beideng where approvalId is null limit 1000`
    if (dbdata == null || dbdata.length == 0) {
        return
    }
    for (const item of dbdata) {
        console.log(item['id'])
        const $ = cheerio.load(item['source'])

        // Join the trimmed text of every element in a selection.
        const joinTexts = (selection, sep) => selection
            .map(function () { return $(this).text().trim() })
            .toArray()
            .join(sep)

        let name = $('div.gdi-sku-name h1').text()
        // The first ".select-common" box holds specifications, the second attributes.
        const specification = joinTexts($('.select-common').eq(0).find('.select-common-choice'), ';')
        const attribute = joinTexts($('.select-common').eq(1).find('.select-common-choice'), ';')
        // Breadcrumb minus the first (home) and last (product) entries.
        const category = joinTexts($('.bc-li').slice(1, -1), '-')
        const detail = $('.gpw-body').text()
        // Registration number: 械备 (filing) takes precedence over 注准 (approval).
        const approvalId1 = $(".cp-li-right:contains('械备')").text().trim()
        const approvalId2 = $(".cp-li-right:contains('注准')").text().trim()
        const approvalId = approvalId1.length > 0 ? approvalId1 : approvalId2

        // Strip each specification token out of the product name.
        // (The original loop used an implicit global `i` — a strict-mode bug.)
        for (const spec of specification.split(";")) {
            name = name.replace(spec, '')
        }

        await prisma.beideng.update({
            where: { id: item.id },
            data: { approvalId, name, category, specification, attribute, detail }
        })
    }
}

/**
 * Entry point. The commented-out lines are alternative crawl stages the
 * author toggles manually (full category crawl, single-page fetch, etc.);
 * currently only the stored-page parsing step runs.
 */
async function main2() {
    
    //await fetchList("/c-3160.html")
    //const rs = await fetch("https://www.baidu.com")
    //await fetch('/p/V514181.html')

    //const urls = ['/c-3280.html']
    //const urls = await getAllCategory()
    //console.log(R.indexOf('/c-3280.html', urls))
    // for (let index = 0; index < urls.length; index++) {
    //     const url = urls[index]
    //     await fetchList(url)
    // }
    //console.log(urls)

    await parseDetail()

}

// Run the entry point. The original left the promise floating, so any
// rejection became an unhandled-rejection crash/warning; log the error
// and flag the process as failed instead.
main2().catch((err) => {
    console.error(err)
    process.exitCode = 1
})
