import superagent from 'superagent';
import cheerio from 'cheerio';
import fs from 'fs';
import path from 'path';

/** A single scraped row: the link title plus its description cell text. */
interface TargetData {
    title: string;
    desc: string;
}

/** Result of one crawl: capture time (epoch milliseconds) and the scraped rows. */
interface RetData {
    time: number;
    data: TargetData[];
}

/** On-disk shape of data/ret.json: crawl timestamp -> rows scraped at that time. */
interface FileData {
    [timestamp: number]: TargetData[];
}


/**
 * Scrapes http://toscrape.com/, extracts title/description pairs from the
 * rows of the page's second `.table` element, and appends the result —
 * keyed by capture timestamp — to data/ret.json.
 */
class Crawler {
    // Page to scrape.
    private url = `http://toscrape.com/`;

    constructor() {
        // Fire-and-forget, but surface failures instead of leaving an
        // unhandled promise rejection (the original ignored the promise).
        this.crawlProcess().catch((err: unknown) => {
            console.error('crawl failed:', err);
        });
    }

    /**
     * Fetch the raw HTML of the target page.
     * @returns the response body as a string.
     */
    async getComposition(): Promise<string> {
        const respRet = await superagent.get(this.url);
        return respRet.text;
    }

    /**
     * Extract the target rows from the page HTML.
     * Per the intended layout (see original comment): each data <tr> — the
     * header row at index 0 is skipped — carries the title in an <a> and
     * the description in the SECOND <td>.
     * @param html raw page HTML.
     * @returns timestamped list of scraped rows.
     */
    getTargeData(html: string): RetData {
        const targetData: TargetData[] = [];
        const $ = cheerio.load(html);
        const trList = $($('.table')[1]).find('tr');
        // .each (not .map): we iterate purely for the push side effect.
        trList.each((index, item) => {
            if (index > 0) { // skip the header row
                const title = $(item).find('a').text();
                // BUGFIX: the original `.find('td').text()` concatenated the
                // text of ALL <td>s in the row; the stated intent is the
                // second <td> only, so select it with .eq(1).
                const desc = $(item).find('td').eq(1).text();
                targetData.push({ title, desc });
            }
        });
        return {
            time: Date.now(),
            data: targetData,
        };
    }

    /**
     * Merge freshly scraped data into the on-disk history.
     * @param data one crawl's timestamped rows.
     * @returns the full history object, with this crawl added under its timestamp.
     */
    assemblyData(data: RetData): FileData {
        let fileData: FileData = {};
        const filePath = path.resolve('data/ret.json');
        if (fs.existsSync(filePath)) {
            // NOTE(review): JSON.parse output is trusted without validation;
            // a corrupt ret.json will throw here or poison the data shape.
            fileData = JSON.parse(fs.readFileSync(filePath, 'utf-8'));
        } else {
            // recursive:true makes this a no-op if the directory already
            // exists — the original `fs.mkdirSync('data')` threw EEXIST when
            // 'data' existed but ret.json did not.
            fs.mkdirSync(path.dirname(filePath), { recursive: true });
        }
        fileData[data.time] = data.data;
        return fileData;
    }

    /** Orchestrate one crawl: fetch -> parse -> merge -> persist. */
    async crawlProcess(): Promise<void> {
        const filePath = path.resolve('data/ret.json');
        const html = await this.getComposition();
        const retData = this.getTargeData(html);
        const fileData = this.assemblyData(retData);
        fs.writeFileSync(filePath, JSON.stringify(fileData));
        console.log('aaa', fileData); // debug leftover; kept to preserve output
    }
}

const c1 = new Crawler();