package com.team5.reptiles;

import com.team5.entity.RFRange;
import com.team5.mapper.RFRangeMapper;
import org.apache.log4j.Logger;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.chrome.ChromeOptions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
import java.util.concurrent.TimeUnit;

@Component
public class DeclineAndIncrease {

    @Autowired
    RFRangeMapper rfRangeMapper;

    private static final Logger logger = Logger.getLogger(DeclineAndIncrease.class);

    // DateTimeFormatter is immutable and thread-safe, unlike the SimpleDateFormat
    // it replaces, so it is safe to share as a static constant. Output format is
    // unchanged ("yyyy-MM-dd HH:mm:ss" in the system time zone).
    private static final DateTimeFormatter DATE_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());

    // Fallback chromedriver path for the original dev machine; overridable via
    // -Dwebdriver.chrome.driver=... (see fetchIndustryData).
    private static final String DEFAULT_CHROMEDRIVER_PATH =
            "C:\\Users\\86155\\Desktop\\chrome-win64\\chromedriver.exe";

    // How many top gainers / top losers are persisted per run.
    private static final int TOP_N = 5;

    /**
     * Logs up to {@code limit} entries of {@code dataList}, one line each in the
     * form {@code "industry: x.xx%"}. Safe for lists shorter than {@code limit}.
     *
     * @param dataList ranked industry data to print
     * @param limit    maximum number of entries to log
     */
    static void printDataList(List<IndustryData> dataList, int limit) {
        for (int i = 0; i < Math.min(limit, dataList.size()); i++) {
            IndustryData data = dataList.get(i);
            logger.info(String.format("%s: %.2f%%", data.getIndustry(), data.getPercentChange()));
        }
    }

    /**
     * Scheduled scraper: every 10 minutes, loads the eastmoney.com sector
     * fund-flow page in headless Chrome, parses the per-industry percentage
     * changes, and replaces the RFRange table contents with the top
     * {@value #TOP_N} gainers and top {@value #TOP_N} losers.
     *
     * <p>The table is only cleared after a successful scrape, so a failed run
     * no longer wipes the previously stored data.
     */
    @Scheduled(cron = "0 0/10 * * * ?") // runs every 10 minutes
    public void fetchIndustryData() {
        int logId = LogIdGenerator.getNextLogId(); // id correlating this run's log lines
        long startTime = System.currentTimeMillis();

        // Honor an externally supplied -Dwebdriver.chrome.driver; only fall back
        // to the hard-coded path when none is set (the original code clobbered it).
        System.setProperty("webdriver.chrome.driver",
                System.getProperty("webdriver.chrome.driver", DEFAULT_CHROMEDRIVER_PATH));

        ChromeOptions options = new ChromeOptions();
        options.addArguments("--headless");    // no visible browser window
        options.addArguments("--disable-gpu"); // recommended on Windows
        options.addArguments("--no-sandbox");

        WebDriver driver = new ChromeDriver(options);
        try {
            // Configure the implicit wait BEFORE navigating so it applies to
            // element lookups from the first page load onwards.
            driver.manage().timeouts().implicitlyWait(15, TimeUnit.SECONDS);
            driver.get("https://data.eastmoney.com/bkzj/hy.html");

            Document doc = Jsoup.parse(driver.getPageSource());

            List<IndustryData> increaseList = new ArrayList<>();
            List<IndustryData> decreaseList = new ArrayList<>();

            for (Element row : doc.select("tr")) {
                Elements columns = row.select("td");
                // Need at least 4 columns: industry name is column 2,
                // percentage change is column 4.
                if (columns.size() < 4) {
                    continue;
                }
                String industry = columns.get(1).text();
                String percentage = columns.get(3).text();

                double percentChange;
                try {
                    percentChange = Double.parseDouble(percentage.replace("%", ""));
                } catch (NumberFormatException ignored) {
                    // Placeholder cells such as "-" or blanks occur in the live
                    // table; previously these aborted the entire run.
                    continue;
                }
                if (percentChange > 0) {
                    increaseList.add(new IndustryData(industry, percentChange));
                } else {
                    decreaseList.add(new IndustryData(industry, percentChange));
                }
            }

            // Gainers: largest first. Losers: most negative first.
            increaseList.sort(Comparator.comparingDouble(IndustryData::getPercentChange).reversed());
            decreaseList.sort(Comparator.comparingDouble(IndustryData::getPercentChange));

            // Clear the table only now that fresh data is in hand, so a scrape
            // failure above leaves the previous contents intact.
            rfRangeMapper.deleteAll();
            persistTop(increaseList);
            persistTop(decreaseList);
        } catch (Exception e) {
            // Pass the throwable so the full stack trace is logged, not just the message.
            logger.error("Error fetching industry data", e);
        } finally {
            driver.quit(); // always release the browser process
        }

        long endTime = System.currentTimeMillis();

        // Log crawler end information (message text intentionally unchanged).
        String logMessageEnd = "日志ID: " + logId +
                "，爬虫行为说明: 抓取板块资金流数据" +
                ", 爬虫网址: https://data.eastmoney.com/bkzj/hy.html"+
                "，爬虫开始时间: " + DATE_FORMAT.format(Instant.ofEpochMilli(startTime)) +
                "，爬虫结束时间: " + DATE_FORMAT.format(Instant.ofEpochMilli(endTime)) +
                "，爬虫数据表: 板块资金流";
        logger.info(logMessageEnd);
    }

    /**
     * Persists the leading (up to {@value #TOP_N}) entries of an already-ranked
     * list into the RFRange table. Extracted to remove the duplicated insert
     * loops for gainers and losers.
     *
     * @param ranked industry data, pre-sorted so the most relevant entries come first
     */
    private void persistTop(List<IndustryData> ranked) {
        for (int i = 0; i < Math.min(TOP_N, ranked.size()); i++) {
            IndustryData data = ranked.get(i);
            RFRange rfRange = new RFRange();
            rfRange.setName(data.getIndustry());
            rfRange.setChange(data.getPercentChange());
            rfRangeMapper.insert(rfRange);
        }
    }

    /**
     * Immutable value holder pairing an industry name with its percentage change.
     */
    static class IndustryData {
        private final String industry;
        private final double percentChange;

        public IndustryData(String industry, double percentChange) {
            this.industry = industry;
            this.percentChange = percentChange;
        }

        public String getIndustry() {
            return industry;
        }

        public double getPercentChange() {
            return percentChange;
        }
    }
}
