package com.xm.spider;

import lombok.extern.slf4j.Slf4j;
import org.openqa.selenium.*;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.chrome.ChromeOptions;
import org.openqa.selenium.interactions.Actions;
import org.openqa.selenium.remote.CapabilityType;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.time.Duration;
import java.util.Collections;
import java.util.List;
import java.util.Random;

/**
 * Java web-crawler example built on Selenium WebDriver.
 *
 * <p>{@link #main1} opens three pages in separate tabs; {@link #main2} drives the
 * Youdao translator, reads the translated text and writes it to a local file.
 * Both demos need a local {@code chromedriver.exe} that matches the installed
 * Chrome version (check via {@code chrome://version/}).</p>
 */
@Slf4j
public class SeleniumExample {
    /** Pool of desktop User-Agent strings used to disguise the crawler. */
    static final String[] userAgents = {
            "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 Edg/131.0.0.0",
            "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36",
            "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.5845.97 Safari/537.36 Core/1.116.460.400 QQBrowser/13.3.6167.400"
    };

    /**
     * Builds the ChromeOptions shared by both demos: DevTools CORS relaxation,
     * a randomly picked User-Agent (anti-crawling countermeasure #1) and an
     * INFO log level.
     *
     * @return a freshly configured {@link ChromeOptions}
     */
    private static ChromeOptions buildOptions() {
        Random random = new Random();
        ChromeOptions options = new ChromeOptions();
        options.addArguments("--remote-allow-origins=*");
        // Index by array length instead of the previous hard-coded 3, so newly
        // added User-Agents are picked up automatically.
        options.addArguments("--user-agent=" + userAgents[random.nextInt(userAgents.length)]);
        options.addArguments("--log-level=INFO");
        return options;
    }

    /**
     * Demo 1 — open three pages, each in its own tab:
     * <ol>
     *   <li>the Youdao translator page</li>
     *   <li>www.89ip.cn (free proxy list)</li>
     *   <li>httpbin.org/ip (shows the caller's IP)</li>
     * </ol>
     *
     * @param args unused
     */
    public static void main1(String[] args) {
        long start = System.currentTimeMillis();
        WebDriver driver = null;
        try {
            log.info("爬虫开始");
            System.setProperty("webdriver.chrome.driver", "C:\\Program Files\\Google\\Chrome\\Application\\chromedriver.exe");

            driver = new ChromeDriver(buildOptions());

            log.info("爬虫正在打开翻译页面。。");
            driver.get("https://fanyi.youdao.com/#/TextTranslate");
            // Wait for the document body instead of titleIs(driver.getTitle()),
            // which compared the title to itself and therefore always passed.
            new WebDriverWait(driver, Duration.ofSeconds(10))
                    .until(ExpectedConditions.presenceOfElementLocated(By.tagName("body")));

            log.info("爬虫正在打开89ip页面。。");
            driver.switchTo().newWindow(WindowType.TAB);
            // getTitle() returns "" (never null) on a blank tab, so the old
            // null check was dead code; wait for the new window count instead.
            new WebDriverWait(driver, Duration.ofSeconds(2))
                    .until(ExpectedConditions.numberOfWindowsToBe(2));
            driver.get("https://www.89ip.cn/");

            log.info("爬虫正在打开crossip页面。。");
            driver.switchTo().newWindow(WindowType.TAB);
            new WebDriverWait(driver, Duration.ofSeconds(2))
                    .until(ExpectedConditions.numberOfWindowsToBe(3));
            driver.get("https://httpbin.org/ip");

            driver.manage().window().maximize();

            // Keep the browser open for 30s so the result can be inspected.
            Thread.sleep(30000L);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the interrupt flag before rethrowing
            throw new RuntimeException(e);
        } finally {
            log.info("爬虫结束。耗时={}s", (System.currentTimeMillis() - start) / 1000L);
            if (driver != null) {
                driver.quit();
            }
        }
    }

    /**
     * Demo 2 — translate a paragraph on the Youdao translator page and write
     * the translated text to {@code output.txt} (UTF-8).
     *
     * @param args unused
     */
    public static void main2(String[] args) {
        long start = System.currentTimeMillis();
        WebDriver webDriver = null;
        try {
            log.info("爬虫开始");
            // Chrome version checked via 'chrome://version/': 120.0.6099.71
//            System.setProperty("webdriver.chrome.driver", "D:\\java\\spider\\chromedriver.exe");
            System.setProperty("webdriver.chrome.driver", "C:\\Program Files\\Google\\Chrome\\Application\\chromedriver.exe");

            ChromeOptions chromeOptions = buildOptions();
//            chromeOptions.addArguments("--headless"); // run without a visible browser window
//            chromeOptions.addArguments("--proxy-server=47.122.65.254:8080"); // broke page loads when enabled

            // Anti-crawling countermeasure #2: proxy disguise (failed — most
            // likely the free proxy IP is unreliable).
            Proxy proxy = new Proxy();
            proxy.setHttpProxy("47.122.65.254" + ":" + "8080");
            proxy.setSslProxy("47.122.65.254" + ":" + "8080");
            chromeOptions.setProxy(proxy);
//            chromeOptions.setCapability(CapabilityType.PROXY, proxy); // broke page loads when enabled

            webDriver = new ChromeDriver(chromeOptions);

            // Verify the proxy IP took effect.
            webDriver.get("https://httpbin.org/ip");
            new WebDriverWait(webDriver, Duration.ofSeconds(2))
                    .until(ExpectedConditions.presenceOfElementLocated(By.tagName("pre")));
            try {
                // findElement never returns null — it throws when the element
                // is missing, so the former '== null' check was dead code.
                WebElement crossElement = webDriver.findElement(By.tagName("pre"));
                log.info("爬虫代理IP检测成功。代理IP为 {}", crossElement.getText());
            } catch (NoSuchElementException e) {
                log.error("爬虫代理IP检测失败！未找到cross。");
                return;
            }

            webDriver.switchTo().newWindow(WindowType.TAB);
            new WebDriverWait(webDriver, Duration.ofSeconds(2))
                    .until(ExpectedConditions.numberOfWindowsToBe(2));

            // Start the translation.
            webDriver.get("https://fanyi.youdao.com/#/TextTranslate");

            WebDriverWait webDriverWait = new WebDriverWait(webDriver, Duration.ofSeconds(10));
            webDriverWait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("#js_fanyi_input")));

            WebDriver.Navigation navigate = webDriver.navigate();
            navigate.refresh(); // refresh the page to dismiss an occasional pop-up overlay

            Actions actions = new Actions(webDriver);
            WebElement inputElement = webDriver.findElement(By.cssSelector("#js_fanyi_input"));
//            actions.moveToElement(inputElement); // hover
            actions.clickAndHold(inputElement); // single click

            // The input is capped at 5000 characters; longer texts must be chunked.
//            inputElement.sendKeys("hello selenium!");
            inputElement.sendKeys("The place stank. A queer, mingled stench that only the ice buried\n" +
                    "cabins of an Antarctic camp know, compounded of reeking human\n" +
                    "sweat, and the heavy, fish oil stench of melted seal blubber. An\n" +
                    "overtone of liniment combated the musty smell of sweat-andsnow-drenched furs. The acrid odor of burnt cooking fat, and the\n" +
                    "animal, not-unpleasant smell of dogs, diluted by time, hung in the\n" +
                    "air.\n" +
                    "Lingering odors of machine oil contrasted sharply with the taint of\n" +
                    "harness dressing and leather. Yet, somehow, through all that reek\n" +
                    "of human beings and their associates - dogs, machines and cooking\n" +
                    "- came another taint. It was a queer, neck-ruffling thing, a faintest\n" +
                    "suggestion of an odor alien among the smells of industry and life.\n" +
                    "And it was a lifesmell. But it came from the thing that lay bound\n" +
                    "with cord and tarpaulin on the table, dripping slowly, methodically\n" +
                    "onto the heavy planks, dank and gaunt under the unshielded glare\n" +
                    "of the electric light.");

            log.info("爬虫正在读取翻译结果11");
            new WebDriverWait(webDriver, Duration.ofSeconds(20)).until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("#js_fanyi_output_resultOutput")));

            StringBuilder builder = new StringBuilder();
            log.info("爬虫正在读取翻译结果22");

            // XPath: //*[@id="js_fanyi_output_resultOutput"] — one <p> per paragraph
            List<WebElement> elements = webDriver.findElements(By.xpath("//*[@id=\"js_fanyi_output_resultOutput\"]/child::p"));
            log.info("爬虫正在读取翻译结果33");
            // findElements returns an empty list (never null) when nothing matches.
            for (WebElement element : elements) {
                builder.append(element.getText());
            }

            // Anti-crawling countermeasure #3: random 0~50ms delay.
            Thread.sleep((long) (Math.random() * 50));

            log.info("爬虫正在将翻译结果写入txt文件");
            String filePath = "D:\\data\\新建文件夹\\output.txt";
            String content = builder.toString();
            // Explicit UTF-8: the bare FileWriter constructor used before falls
            // back to the platform charset (GBK on a Chinese Windows install).
            try (BufferedWriter writer = Files.newBufferedWriter(Paths.get(filePath), StandardCharsets.UTF_8)) {
                writer.write(content);
            } catch (IOException e) {
                log.error("爬虫将翻译结果写入txt文件异常！", e);
            }

            log.info("爬虫正在关闭。。");
            // Keep the page open for five minutes (the old comment claimed ten)
            // so the result can be inspected before quit().
            Thread.sleep(300000L);

        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the interrupt flag before rethrowing
            throw new RuntimeException(e);
        } finally {
            log.info("爬虫结束。耗时={}s", (System.currentTimeMillis() - start) / 1000L);
            if (webDriver != null) {
                webDriver.quit();
            }
        }
    }
}
