package cjq.demo.demo;

import cn.edu.hfut.dmic.webcollector.crawler.DeepCrawler;
import cn.edu.hfut.dmic.webcollector.model.Links;
import cn.edu.hfut.dmic.webcollector.model.Page;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;

import java.net.URLEncoder;
import java.util.List;

/**
 *
 *
 * @author <a href="mailto:ls.zhaoxiangyu@gmail.com">zhao</a>
 * @date 2015-10-22
 */
public class DemoJSCrawler extends DeepCrawler {

    /**
     * Creates the crawler with a local directory used to persist crawl state.
     *
     * @param crawlPath directory where WebCollector stores its crawl data
     */
    public DemoJSCrawler(String crawlPath) {
        super(crawlPath);
    }

    /**
     * Visits a fetched page and returns the links to crawl next.
     * Rendering is delegated to a PhantomJS-backed WebDriver so that
     * JavaScript-generated content can be extracted.
     *
     * @param page the fetched page to process
     * @return {@code null} — this demo does not enqueue follow-up links
     */
    @Override
    public Links visitAndGetNextLinks(Page page) {
        handleByPhantomJsDriver(page);
        return null;
    }

    /**
     * Renders the page with a PhantomJS WebDriver and prints the extracted
     * search results. The driver is always quit — even when extraction
     * throws — so the underlying PhantomJS process is never leaked.
     *
     * @param page the fetched page to render
     */
    protected void handleByPhantomJsDriver(Page page){
        WebDriver driver = PageUtils.getWebDriver(page);
        try {
            print(driver);
        } finally {
            // BUGFIX: quit() used to run unconditionally after print(...);
            // a thrown NoSuchElementException would leak the browser process.
            driver.quit();
        }
    }

    /**
     * Prints the text and href of every search-result entry on the current
     * page (assumes a tianyancha.com search-result layout — one
     * {@code div.search_result_single} per hit with an {@code a.query_name}
     * link inside).
     *
     * @param driver a driver whose current page is a search-result page
     */
    protected void print(WebDriver driver){
        List<WebElement> results = driver.findElements(By.cssSelector("div.search_result_single"));
        for (WebElement result : results) {
            WebElement nameLink = result.findElement(By.cssSelector("a.query_name"));
            String href = nameLink.getAttribute("href");
            System.out.println(nameLink.getText() + ":" + href);
        }
    }

    /**
     * Demo entry point: seeds a tianyancha.com search for a company name
     * (URL-encoded) and starts a single-depth crawl.
     *
     * @param args unused
     * @throws Exception if seeding or crawling fails
     */
    public static void main(String[] args) throws Exception {
        DemoJSCrawler crawler = new DemoJSCrawler("d:/yzm");
        String url = "http://www.tianyancha.com/search?key=%s&checkFrom=searchBox";
        String entName = "南京三宝科技股份有限公司";
        entName = URLEncoder.encode(entName, "utf-8");
        crawler.addSeed(String.format(url, entName));
        crawler.start(1);
    }

}