package com.chance.cc.crawler.prod.command.meta;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.downloader.HttpConstant;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.HttpRequest;
import com.chance.cc.crawler.core.downloader.HttpRequestBody;
import com.chance.cc.crawler.meta.core.bean.crawler.CrawlerDomainKeys;
import com.chance.cc.crawler.prod.command.HttpCommand;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;

/**
 * created by CC on 2020/11/26
 * mail 279020185@qq.com
 */
@Slf4j
public class CrawlerDomainKeywordCommand extends HttpCommand {

    public CrawlerDomainKeywordCommand(String host, int port){
        super(host,port);
    }

    public HttpPage addOrUpdateKeys(CrawlerDomainKeys crawlerDomainKeys){
        String url = commandUrl("add/keys");
        //compose crawler scheduler job
        //generate request
        HttpRequest request = new HttpRequest();
        request.setUrl(url);
        request.setMethod(HttpConstant.Method.POST);
        request.setRequestBody(HttpRequestBody.json(JSON.toJSONString(crawlerDomainKeys),"utf-8"));
        HttpPage httpPage = doCommand(request);
        log.info("add or update domain keys result: {}",httpPage.getRawText());
        return httpPage;
    }

    public HttpPage delKeys(CrawlerDomainKeys crawlerDomainKeys){
        String url = commandUrl("del/key");
        //compose crawler scheduler job
        //generate request
        HttpRequest request = new HttpRequest();
        request.setUrl(url);
        request.setMethod(HttpConstant.Method.POST);
        request.setRequestBody(HttpRequestBody.json(JSON.toJSONString(crawlerDomainKeys),"utf-8"));
        HttpPage httpPage = doCommand(request);
        log.info("del domain keys result: {}",httpPage.getRawText());
        return httpPage;
    }

    @Override
    public String initUrlPrefix(String host,int port) {
        return StringUtils.join("http://",host,":",port,"/v1/meta/");
    }
}
