package com.izhang3.proxy.client;

import com.alibaba.fastjson.JSON;
import com.izhang3.proxy.config.ParserProperties;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.*;

/**
 * @author 张良
 * @desc
 * @since 2019/6/20 11:30
 */
public abstract class BaseParser {

    private static final Logger logger = LoggerFactory.getLogger(BaseParser.class);


    /**
     * Supplies the crawl configuration for a concrete proxy-site parser:
     * target URL, enable flag, and the CSS selectors for the proxy table,
     * ip cell and port cell.
     *
     * @return the parser configuration; may be {@code null}, in which case
     *         {@link #parse()} returns an empty list
     */
    public abstract ParserProperties getConfig();


    /**
     * Crawls the configured page and extracts proxy addresses.
     *
     * <p>Each row matched by the table selector is reduced to the text of its
     * ip and port elements, joined as {@code "ip:port"}. Rows where either
     * selector matches nothing (e.g. header rows) are skipped rather than
     * aborting the whole page. Any crawl/parse failure is logged and results
     * in an empty list.
     *
     * @return list of proxies in {@code "ip:port"} form; never {@code null}
     */
    public List<String> parse() {
        ParserProperties parserProperties = getConfig();
        // No configuration, or this proxy site is disabled.
        if (parserProperties == null || !parserProperties.getEnable()) {
            return Collections.emptyList();
        }
        String url = parserProperties.getUrl();
        try {
            logger.info("begin 爬取网址 url={}", url);
            Document document = Jsoup.connect(url)
                    .get();
            Elements elements = document.
                    select(parserProperties.getTable());
            List<String> proxyList = new ArrayList<>(elements.size());
            for (Element element : elements) {
                Element ipElement = element.select(parserProperties.getIp()).first();
                Element portElement = element.select(parserProperties.getPort()).first();
                // first() returns null when the selector matches nothing in this
                // row (e.g. a table header); skip the row instead of throwing an
                // NPE that would discard every other row on the page.
                if (ipElement == null || portElement == null) {
                    continue;
                }
                proxyList.add(ipElement.text() + ":" + portElement.text());
            }
            logger.info("end 爬取网址 url={} result={}", url, JSON.toJSON(proxyList));
            return proxyList;
        } catch (Exception e) {
            logger.error("网址{}爬取异常 ",url, e);
        }
        return Collections.emptyList();
    }
}
