import superagent from "superagent";
import cheerio from 'cheerio';
import path from "path";
import fs from "fs";
import { moveEmitHelpers } from "typescript";



/** One scraped movie record from the Douban list page. */
type Info = {
    /** Movie title text. */
    title: string;
    /** Link to the movie's detail page. */
    url: string;
    /** Poster image URL. */
    pic: string;
}

/** Result of one crawl pass: when it ran and what it found. */
type InfoResult = {
    /** Crawl timestamp (milliseconds since epoch). */
    time: number;
    /** Movies scraped in this pass. */
    data: Info[];
}

/**
 * On-disk JSON shape: crawl timestamp -> movies scraped at that time.
 * Equivalent to the index-signature form `{ [propName: number]: Info[] }`.
 */
type objJson = Record<number, Info[]>




/**
 * Scrapes the Douban Top 250 movie chart and persists each crawl's
 * results into ./data/url.json, keyed by timestamp.
 */
class Crawler {
    // Target page: Douban Top 250 movie chart.
    private url: string = 'https://movie.douban.com/top250';

    constructor() {
        // Fire-and-forget kick-off. The .catch() prevents an unhandled
        // promise rejection if the download/parse/write fails.
        this.initSpiderProcess().catch((err) => {
            console.error('爬取失败:', err);
        });
    }

    /**
     * Download the raw HTML of the given URL.
     * @param url page address to fetch
     * @returns the superagent response; its `text` field holds the HTML
     */
    async getRawHtml(url: string) {
        const result = await superagent.get(url);
        console.log('获取页面成功');

        return result;
    }

    /**
     * Parse the movie entries out of the list-page HTML.
     * @param html raw HTML string of the Douban Top 250 page
     * @returns timestamped array of { title, url, pic } entries
     */
    async getJsonInfo(html: string) {
        const $ = cheerio.load(html); // 使用cheerio将html字符串转换为虚拟dom对象
        const movies = $('.grid_view .item');

        const info: InfoResult = {
            time: new Date().getTime(),
            data: []
        };

        // BUG FIX: the loop bound was `moveEmitHelpers.length` — a
        // mistakenly auto-imported TypeScript compiler internal — instead
        // of the number of matched movie nodes.
        for (let i = 0; i < movies.length; i++) {
            const movie = movies[i];
            const pic: string = $(movie).find('.pic img').attr('src') || "";
            const title: string = $(movie).find('.info .title:first-child').text() || "";
            const url: string = $(movie).find('.info a').attr('href') || "";

            info.data[i] = {
                title,
                url,
                pic
            }
        }

        return info;
    }

    /**
     * Merge this crawl's result into ./data/url.json, keyed by timestamp.
     * Existing entries from earlier crawls are preserved.
     * @param info parsed crawl result to persist
     */
    async getJsonContent(info: InfoResult) {
        const filePath = path.resolve(__dirname, './data/url.json');

        let fileContent: objJson = {};

        if (fs.existsSync(filePath)) {
            // 使用 fs.readFileSync() 读取出的是buffer对象 — pass an
            // encoding so we get a string suitable for JSON.parse.
            fileContent = JSON.parse(fs.readFileSync(filePath, 'utf-8'));
        }
        fileContent[info.time] = info.data;

        // Ensure ./data exists before writing; writeFileSync does not
        // create intermediate directories and would throw ENOENT.
        fs.mkdirSync(path.dirname(filePath), { recursive: true });
        fs.writeFileSync(filePath, JSON.stringify(fileContent));
    }

    /**
     * Orchestrates one crawl: download page -> parse -> persist to disk.
     */
    async initSpiderProcess() {
        const { text: html } = await this.getRawHtml(this.url);
        const info = await this.getJsonInfo(html);
        console.log(info);

        await this.getJsonContent(info);
    }
}

// Instantiation alone starts the crawl: the constructor invokes initSpiderProcess().
const crawler = new Crawler();
