/**
 * Created by xuyq on 2023/3/6.
 */
const Nightmare = require('nightmare');
const fs = require('fs')

// Single shared Nightmare (Electron) browser instance used for the login
// session and every subsequent page crawl.
const nightmare = Nightmare({
    waitTimeout: 60 * 1000 * 2, // allow slow pages up to 2 minutes before wait() fails
    show: true,                 // show the Electron window (useful for watching the crawl)
    openDevTools: {
        mode: 'detach'          // open DevTools in a separate window
    },
});

// URLs already crawled in this run, to avoid visiting a page twice.
const visitedUrls = new Set();

// One {url, pageTitle} entry per crawled page (collected but never persisted).
const result = [];
// Accumulated {url, content, path} records, de-duplicated by content text,
// written to ./data.json at the end of the run.
let pageData = []
/**
 * Crawl a single page: navigate to it, let it render, then extract the
 * visible text of every `.crawler_content` node together with a breadcrumb
 * path built from the `a.crawler_active` links, ordered shallow-to-deep by
 * their `lvN` class.
 *
 * Side effects: appends `{url, content, path}` records (de-duplicated by
 * content) to the module-level `pageData` array, records the page title in
 * `result`, and marks the URL in `visitedUrls` so it is never crawled twice.
 * Crawl failures are logged and swallowed so one bad page does not abort
 * the whole run.
 *
 * @param {string} url - absolute URL to crawl
 * @returns {Promise<void>}
 */
const crawlPage = async (url) => {
    if (visitedUrls.has(url)) {
        return;
    }
    visitedUrls.add(url);
    console.log(`Crawling ${url}...`);
    try {
        const page = nightmare.goto(url);
        const pageTitle = await page.title();
        result.push({ url, pageTitle });
        // Give client-side rendering a moment to settle before extracting.
        await page.wait(2000);
        await page.inject('js', 'node_modules/jquery/dist/jquery.min.js');
        // NOTE: this callback runs inside the browser context — it cannot
        // reference any Node-side variables.
        const { data } = await page.evaluate(() => {
            const contentSelectorKey = '.crawler_content';
            const activeSelectorKey = 'a.crawler_active';
            // Matches menu-depth markers such as "lv1" ... "lv5" in a class list.
            const levelClassReg = /(lv)\S{1}/;
            // Numeric depth of an element's lvN class; `fallback` when absent.
            const levelOf = (el, fallback) => {
                const m = el.classList.value.match(levelClassReg);
                return Number(m ? m[0].split('lv')[1] : fallback);
            };
            // Active breadcrumb links ordered shallow -> deep (no lvN class
            // sorts as the deepest level, 5).
            const activeNodes = Array.from(document.querySelectorAll(activeSelectorKey))
                .sort((a, b) => levelOf(a, '5') - levelOf(b, '5'));
            const actives = activeNodes.map((el) => el.innerText || el.textContent);
            // FIX: the original crashed with a TypeError when the page had no
            // active links (indexing an empty array) or the deepest link had
            // no lvN class (match(...) was null); default the level to 5.
            const deepest = activeNodes[activeNodes.length - 1];
            const curLevel = deepest ? levelOf(deepest, '5') : 5;
            // Visible content nodes at the current depth or deeper.
            // FIX: levels are compared numerically, not as strings.
            const contents = Array.from(document.querySelectorAll(contentSelectorKey)).filter((el) => {
                const { width, height } = el.getBoundingClientRect();
                return width && height && levelOf(el, '5') >= curLevel;
            });
            const path = actives.length ? actives.join('/').replace(/\n/g, '') : '';
            // Build records in document order, de-duplicated by content text.
            const data = [];
            const seen = new Set();
            for (const el of contents) {
                const content = el.innerText || el.textContent;
                if (content && !seen.has(content)) {
                    data.push({ url: window.location.href, content, path });
                    seen.add(content);
                }
            }
            return { data };
        });
        pageData = pageData.concat(data);
    } catch (error) {
        console.error(`Failed to crawl ${url}: ${error}`);
    }
};


// Entry point: log in, crawl every URL listed in ./urls.txt (one per line),
// then persist all extracted records to ./data.json and shut the browser down.
// SECURITY: credentials are hard-coded below — move them to environment
// variables before committing/sharing this script.
nightmare.goto('http://localhost:8080/#/login?redirect=aHR0cDovL2xvY2FsaG9zdDo4MDgwLyMvaG9tZQ%3D%3D')
    .type('input[placeholder="用户名"]', 'yanqi.xu')
    .type('input[placeholder="密码"]', '123123123')
    .click('button')
    .wait('.banner')
    .inject('js', 'node_modules/jquery/dist/jquery.min.js')
    .url()
    .then(async () => {
        // FIX: the original used callback-style fs.readFile/fs.writeFile
        // inside this async .then, so read/crawl/write errors escaped the
        // outer .catch, and nightmare.end() was queued without being awaited
        // (Nightmare only executes .end() when the chain is consumed, so the
        // Electron window could stay open). Promise-based fs + try/finally
        // make every path deterministic.
        try {
            const datastr = await fs.promises.readFile('./urls.txt', 'utf8');
            const urls = datastr.split('\n');
            console.log(urls);
            // Crawl sequentially — a single shared browser instance cannot
            // visit several pages at once.
            for (const url of urls) {
                if (url) {
                    await crawlPage(url);
                }
            }
            await fs.promises.writeFile('./data.json', JSON.stringify(pageData));
            console.log('data save success！');
        } catch (err) {
            console.error('crawl failed:', err);
        } finally {
            // Awaiting guarantees the Electron process actually terminates.
            await nightmare.end();
        }
    })
    .catch((error) => {
        console.error('Search failed:', error);
        nightmare.end();
    });



