package com.team5.reptiles;


import com.team5.entity.ChinaIndex;
import com.team5.mapper.ChinaIndexMapper;
import org.apache.log4j.Logger;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.chrome.ChromeOptions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.concurrent.TimeUnit;

@Component
public class ChinaIndexRep {
    @Autowired
    ChinaIndexMapper chinaIndexMapper;
    private static final Logger logger = Logger.getLogger(ChinaIndexRep.class);
    // DateTimeFormatter is immutable and thread-safe; a shared static
    // SimpleDateFormat is not and can corrupt output under concurrent use.
    private static final DateTimeFormatter DATE_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());
    // Maximum number of index rows persisted per run (original intent of the
    // "只处理前9个元素" guard, whose counter was never incremented).
    private static final int MAX_ROWS = 9;

    /**
     * Scheduled scraper: every 10 minutes, loads the EastMoney global-index
     * page in headless Chrome, parses the Asia index table with JSoup, and
     * replaces the contents of the China-index table with the fresh snapshot.
     *
     * <p>The previous snapshot is deleted only after a successful scrape, so
     * a network/parse failure leaves the existing data intact.
     */
    @Scheduled(cron = "0 0/10 * * * ?") // 每10分钟执行一次
    public void fetchStockMarketData() {
        int logId = LogIdGenerator.getNextLogId(); // 获取日志ID
        long startTime = System.currentTimeMillis(); // 记录爬虫开始时间

        // TODO(review): driver path is hard-coded to a developer machine —
        // externalize to configuration (e.g. application.properties).
        System.setProperty("webdriver.chrome.driver", "C:\\Users\\86155\\Desktop\\chrome-win64\\chromedriver.exe");

        // Headless Chrome configuration.
        ChromeOptions options = new ChromeOptions();
        options.addArguments("--headless");    // 启用无头模式
        options.addArguments("--disable-gpu"); // 适用于Windows OS
        options.addArguments("--no-sandbox");

        WebDriver driver = new ChromeDriver(options);
        List<ChinaIndex> parsed = new ArrayList<>();
        try {
            // 连接到目标网站
            String url = "https://quote.eastmoney.com/center/qqzs.html";
            driver.get(url);

            // 等待页面加载
            driver.manage().timeouts().implicitlyWait(15, TimeUnit.SECONDS);

            // Parse the rendered HTML with JSoup.
            Document doc = Jsoup.parse(driver.getPageSource());
            Elements tables = doc.select("table#qqzs_asia_simple-table");

            outer:
            for (Element table : tables) {
                for (Element row : table.select("tr")) {
                    // BUG FIX: the original counter was declared but never
                    // incremented, so the 9-row cap never took effect.
                    if (parsed.size() >= MAX_ROWS) {
                        break outer;
                    }

                    Elements cells = row.select("td");
                    // Skip the header row and malformed rows; we need at
                    // least the name and latest-price columns.
                    if (cells.size() < 2) {
                        continue;
                    }

                    String marketName = cells.get(0).text(); // 股市板块名称
                    String indexValue = cells.get(1).text(); // 最新价

                    // The name cell carries a leading bullet ("●..."); keep
                    // the text after it when present instead of assuming it.
                    String[] parts = marketName.split("●");
                    marketName = (parts.length > 1 ? parts[1] : marketName).trim();

                    try {
                        ChinaIndex chinaIndex = new ChinaIndex();
                        chinaIndex.setName(marketName);
                        chinaIndex.setPrice(Double.parseDouble(indexValue));
                        parsed.add(chinaIndex);
                    } catch (NumberFormatException nfe) {
                        // One bad cell should not abort the whole batch.
                        logger.warn("Skipping row with non-numeric price: " + indexValue);
                    }
                }
            }
        } catch (Exception e) {
            // Keep the full stack trace; e.getMessage() alone loses the cause.
            logger.error("Error fetching stock market data", e);
        } finally {
            // 关闭 WebDriver (never null here — constructor either returns
            // an instance or throws before the try block).
            driver.quit();
        }

        // Replace the table only when the scrape produced data.
        if (!parsed.isEmpty()) {
            chinaIndexMapper.deleteAll();
            for (ChinaIndex chinaIndex : parsed) {
                logger.debug(chinaIndex); // was System.out.println
                chinaIndexMapper.insert(chinaIndex);
            }
        }

        long endTime = System.currentTimeMillis(); // 记录爬虫结束时间

        // 记录爬虫结束信息
        String logMessageEnd = "日志ID: " + logId +
                "，爬虫行为说明: 抓取中国指数数据" +
                ", 爬虫网址: https://quote.eastmoney.com/center/qqzs.html" +
                "，爬虫开始时间: " + DATE_FORMAT.format(Instant.ofEpochMilli(startTime)) +
                "，爬虫结束时间: " + DATE_FORMAT.format(Instant.ofEpochMilli(endTime)) +
                "，爬虫数据表: 中国指数";
        logger.info(logMessageEnd);
    }
}

