import superagent from "superagent";
import path from "path";
import fs from "fs";

import WalkerAnalyzer from "./walkerAnalyzer";

/**
 * Contract for turning raw HTML into persistable string data.
 *
 * - `analyze` extracts information from a fetched HTML document.
 * - `merge` combines the previously saved payload with the freshly
 *   retrieved one and returns the content to write back to disk.
 */
export interface Analyzer {
  analyze: (html: string) => string;
  // Fixed typo: "justRetrived" -> "justRetrieved". Parameter names in a
  // function-type property are not part of TS structural compatibility,
  // so existing implementers are unaffected.
  merge: (saved: string, justRetrieved: string) => string;
}

/**
 * Fetches a page, runs it through the supplied {@link Analyzer}, merges the
 * result with previously saved data, and persists it to `filePath`.
 *
 * The crawl is kicked off from the constructor as a fire-and-forget async
 * process; callers cannot await it, so failures are logged instead of being
 * allowed to escape as unhandled promise rejections.
 */
class Crawler {
  constructor(
    private readonly url: string,
    private readonly filePath: string,
    private readonly analyzer: Analyzer
  ) {
    // BUG FIX: the original fired this async method without any rejection
    // handling — a failed HTTP request or fs error became an unhandled
    // promise rejection. Catch, narrow, and log instead.
    this.initSpiderProcess().catch((e: unknown) => {
      console.error(
        "Crawler failed:",
        e instanceof Error ? e.message : String(e)
      );
    });
  }

  /** Download, analyze, merge with previously saved data, and persist. */
  private async initSpiderProcess(): Promise<void> {
    const html = await this.getRawHtml();
    const info = this.analyzer.analyze(html);
    this.writeFile(this.analyzer.merge(this.readFile(), info));
  }

  /** GET `this.url` and return the raw HTML body. */
  private async getRawHtml(): Promise<string> {
    const result = await superagent.get(this.url);
    return result.text;
  }

  /** Read previously saved data; returns "{}" when no file exists yet. */
  private readFile(): string {
    if (fs.existsSync(this.filePath)) {
      return fs.readFileSync(this.filePath, "utf-8");
    }
    return "{}";
  }

  /** Overwrite the data file with `content`. */
  private writeFile(content: string): void {
    fs.writeFileSync(this.filePath, content);
  }
}

// Entry point: crawl the demo page and persist the analyzed result
// alongside the project in ../data/course.json.
const targetUrl = "http://www.dell-lee.com/typescript/demo.html";
const outputPath = path.resolve(__dirname, "../data/course.json");

new Crawler(targetUrl, outputPath, WalkerAnalyzer.getInstance());

console.log("running..");
