package com.share51.novel.controller;

import com.share51.novel.service.CrawlService;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;

import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

/**
 * Crawler management endpoints.
 * @author gb
 */
@RestController
@RequestMapping("/sys/crawl")
public class CrawlController {

    /**
     * Proxy headers consulted, in priority order, when resolving the real
     * client IP. The first non-empty, non-"unknown" value wins.
     */
    private static final String[] IP_HEADERS = {
            "x-forwarded-for",
            "Proxy-Client-IP",
            "WL-Proxy-Client-IP",
            "HTTP_CLIENT_IP",
            "HTTP_X_FORWARDED_FOR"
    };

    @Resource
    private CrawlService crawlService;

    // TODO: list crawlers

    // TODO: view crawler detail

    // TODO: delete crawler (after its novel is deleted, delete the crawler)

    // TODO: update crawler

    // TODO: add crawler (crawls content detail pages and category pages)

    /**
     * Schedules a crawl of the given URL.
     *
     * @param url  URL the crawler should fetch
     * @param type category the crawler collects
     * @param site parsing-rule (site) identifier
     * @return a status message telling the caller the crawl is in progress
     * @throws Exception if the underlying service fails to schedule the crawl
     */
    @PostMapping
    public String add(String url, Integer type, Integer site) throws Exception {
        crawlService.addCrawl(url, type, site);
        return "正在爬虫处理，请等待。。。";
    }

    // TODO: stop crawler

    // TODO: start crawler

    // TODO: stop or start a running crawler

    /**
     * Debug endpoint: resolves and prints the caller's IP address.
     * Deliberately answers 403 so it is not mistaken for a real API.
     *
     * @param request current HTTP request
     */
    @ResponseStatus(HttpStatus.FORBIDDEN)
    @GetMapping("/test")
    public void test(HttpServletRequest request) {
        // NOTE(review): prefer an SLF4J logger over System.out in production code.
        System.out.println("ip = " + resolveClientIp(request));
    }

    /**
     * Walks {@link #IP_HEADERS} in priority order and falls back to the
     * socket's remote address when no usable header is present.
     *
     * NOTE(review): X-Forwarded-For may carry a comma-separated chain of
     * addresses; the original code used the raw header value, so that
     * behavior is preserved here.
     *
     * @param request current HTTP request
     * @return the best-effort client IP, never {@code null}
     */
    private static String resolveClientIp(HttpServletRequest request) {
        for (String header : IP_HEADERS) {
            String ip = request.getHeader(header);
            if (ip != null && !ip.isEmpty() && !"unknown".equalsIgnoreCase(ip)) {
                return ip;
            }
        }
        return request.getRemoteAddr();
    }
}
