package cn.joinhealth.effect.spark.service;

import cn.joinhealth.effect.spark.util.XmlUtils;
import cn.joinhealth.effect.spark.common.ClickHouseProperties;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.time.LocalDate;
import java.time.format.DateTimeFormatter;

/**
 * @author yuan
 * @description 统计患者服务
 * @date 2022/3/10 17:27
 */
public class PatientStatisticsService {

    private static final Logger logger = LoggerFactory.getLogger(PatientStatisticsService.class);

    /**
     * Base name of the ClickHouse target table; a yyyyMMdd date suffix is appended per run.
     */
    private static final String TABLE_NAME = "t_patient_statistics";

    /**
     * Formatter for the daily table-name suffix. DateTimeFormatter is immutable and
     * thread-safe, so it is built once instead of on every invocation.
     */
    private static final DateTimeFormatter TABLE_SUFFIX_FORMATTER = DateTimeFormatter.ofPattern("yyyyMMdd");

    /**
     * Utility class — static methods only, no instances.
     */
    private PatientStatisticsService() {
    }

    /**
     * Counts patient service usage and appends the result to a date-suffixed
     * ClickHouse table (e.g. {@code t_patient_statistics_20220310}).
     *
     * <p>The SQL text is loaded from the XML statement store under the key
     * {@code countServiceTimes}.
     *
     * @param sparkSession active SparkSession used to run the statistics SQL
     */
    public static void countServiceTimes(SparkSession sparkSession) {
        String sql = XmlUtils.getSql("countServiceTimes", null);
        // persist() so the query is evaluated once: count() below and the JDBC write
        // are both Spark actions, and without caching the SQL would run twice.
        Dataset<Row> dataset = sparkSession.sql(sql).persist();
        try {
            logger.info("spark统计患者服务次数: count={}", dataset.count());
            String tableName = TABLE_NAME + "_" + TABLE_SUFFIX_FORMATTER.format(LocalDate.now());
            // Write the Spark computation result to ClickHouse in large batches.
            dataset.write()
                    .mode(SaveMode.Append)
                    .option(JDBCOptions.JDBC_BATCH_INSERT_SIZE(), 100000)
                    .jdbc(ClickHouseProperties.CLICK_HOUSE_URI, tableName, ClickHouseProperties.PROPERTIES);
        } finally {
            // Release the cached data even if the write fails.
            dataset.unpersist();
        }
    }
}
