package com.ly.crawl.web;

import com.ly.crawl.Crawler;
import com.ly.crawl.impl.DownDataDao;
import com.ly.crawl.impl.ListDataCapturer;
import com.ly.crawl.impl.SimpleDataCapturer;
import com.ly.crawl.impl.SimpleUrlCapturer;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.servlet.ModelAndView;

import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import java.io.IOException;
import java.net.URLEncoder;
import java.util.List;
import java.util.Map;

@RestController
@RequestMapping("crawl")
public class CrawlDo {

    /** Session attribute set by {@link #stopCapture} to request that a running crawl abort. */
    private static final String STOP_CAPTURE = "stopCapture";
    /** Session attribute holding the most recent crawl progress event. */
    private static final String CRAWL_PROGRESS = "crawlProgress";

    /**
     * Maps any uncaught exception from this controller to its message as the response body.
     * NOTE(review): the stack trace is discarded and nothing is logged here — consider
     * logging the exception before returning; also {@code getMessage()} may be null.
     */
    @ExceptionHandler(Exception.class)
    String handleEx(Exception e) {
        return e.getMessage();
    }

    /**
     * Redirects "/crawl" and "/crawl/" to the static crawl page.
     */
    @RequestMapping({"/", ""})
    ModelAndView index(ModelAndView mav) {
        mav.setViewName("redirect:/crawl/index.html");
        return mav;
    }

    /**
     * Crawls a single page and extracts one record.
     *
     * @param url        page to crawl
     * @param colMapVals remaining request parameters, treated as column-name → selector pairs
     * @return the captured record
     * @throws IOException if the page cannot be fetched
     */
    @RequestMapping("get")
    Map<String, Object> get(String url, @RequestParam Map<String, String> colMapVals) throws IOException {
        // "url" is bound separately above; drop it so only column mappings remain.
        colMapVals.remove("url");
        Crawler<Map<String, Object>> crawler = new Crawler<>(url, new SimpleDataCapturer(colMapVals));
        return crawler.start();
    }

    /**
     * Crawls a single page and extracts a list of records.
     *
     * @param url        page to crawl
     * @param colMapVals remaining request parameters, treated as column-name → selector pairs
     * @return the captured records
     * @throws IOException if the page cannot be fetched
     */
    @RequestMapping("list")
    List<Map<String, Object>> list(String url, @RequestParam Map<String, String> colMapVals) throws IOException {
        // "url" is bound separately above; drop it so only column mappings remain.
        colMapVals.remove("url");
        Crawler<List<Map<String, Object>>> crawler = new Crawler<>(url, new ListDataCapturer(colMapVals));
        return crawler.start();
    }

    /**
     * Runs a full capture, optionally following further URLs and optionally streaming the
     * result as a downloadable SQL script instead of returning JSON.
     * Progress is published to the session and the crawl can be cancelled via {@link #stopCapture}.
     *
     * @param url        initial crawl URL
     * @param colMapVals remaining request parameters; control keys ("table", "urls", "down")
     *                   are consumed, the rest are column mappings
     * @return the captured data when "down" is absent, otherwise the SQL is written to the response
     * @throws IOException if fetching or writing fails
     */
    @RequestMapping("capture")
    Object capture(String url, @RequestParam Map<String, String> colMapVals,
                   HttpServletResponse response, HttpSession session) throws IOException {
        // Clear any state left over from a previous capture in this session.
        session.removeAttribute(STOP_CAPTURE);
        session.removeAttribute(CRAWL_PROGRESS);
        // Initial crawl URL is bound separately; drop it from the column map.
        colMapVals.remove("url");
        // Target table name for the generated SQL.
        String table = getAndRemove(colMapVals, "table", "你的表名");
        // Regex selecting which discovered page URLs to follow.
        String urlRegex = getAndRemove(colMapVals, "urls");
        // When the "down" parameter is present, export an SQL script; otherwise return JSON.
        boolean downSql = getAndRemove(colMapVals, "down") != null;
        // Build the crawler with the remaining column mappings.
        ListDataCapturer listDataCapturer = new ListDataCapturer(colMapVals);
        Crawler<List<Map<String, Object>>> crawler = new Crawler<>(url, listDataCapturer);
        if (urlRegex != null) {
            crawler.setUrlsCapturer(new SimpleUrlCapturer(null, urlRegex));
        }
        if (downSql) {
            response.setContentType("text/html;charset=utf-8");
            // Encode the filename so non-ASCII table names survive the header.
            response.setHeader("content-disposition", "attachment;filename=" + URLEncoder.encode(table + ".SQL", "UTF-8"));
            crawler.setDao(new DownDataDao(response.getWriter(), table, listDataCapturer.getColumns()));
        }
        crawler.setProgressListener(pe -> {
            session.setAttribute(CRAWL_PROGRESS, pe);
            // Returning false aborts the crawl when a stop was requested.
            if (session.getAttribute(STOP_CAPTURE) != null) {
                session.removeAttribute(STOP_CAPTURE);
                return false;
            }
            return true;
        });
        // When downloading SQL the data is streamed to the response, so skip returning it.
        return crawler.setReturnData(!downSql).start();
    }

    /**
     * Returns and clears the latest progress event for this session, or null if none.
     */
    @RequestMapping("progress")
    Object getProgress(HttpSession session) {
        Object ret = session.getAttribute(CRAWL_PROGRESS);
        session.removeAttribute(CRAWL_PROGRESS);
        return ret;
    }

    /**
     * Requests cancellation of the crawl running in this session; the progress
     * listener in {@link #capture} observes the flag and stops.
     */
    @RequestMapping("stop")
    void stopCapture(HttpSession session) {
        session.setAttribute(STOP_CAPTURE, true);
    }

    /**
     * Removes {@code key} from the map, returning its previous value or null.
     */
    private String getAndRemove(Map<String, String> colMapVals, String key) {
        return this.getAndRemove(colMapVals, key, null);
    }

    /**
     * Removes {@code key} from the map, returning its previous value, or
     * {@code defaultValue} when the key was absent.
     */
    private String getAndRemove(Map<String, String> colMapVals, String key, String defaultValue) {
        // Map.remove already returns the prior mapping, so no separate get is needed.
        String ret = colMapVals.remove(key);
        return ret != null ? ret : defaultValue;
    }

}
