package com.ayg;

import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.core.env.Environment;
import org.springframework.jdbc.core.BatchPreparedStatementSetter;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.web.client.RestTemplate;

import javax.sql.DataSource;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@SpringBootApplication
public class Es2mysqlApplication implements CommandLineRunner {

    public static void main(String[] args) {
        SpringApplication.run(Es2mysqlApplication.class, args);
    }

    @Value("${es.url}")
    private String es_url;
    @Value("${es.query}")
    private String es_query;
    @Value("${es.batchsize}")
    private long es_batchsize;

    private String target_sql="insert into not_audited_policy\n"+
            "(serial_number\t,\n" +
            "title\t,\n" +
            "content\t,\n" +
            "organization\t,\n" +
            "province\t,\n" +
            "city\t,\n" +
            "area\t,\n" +
            "tax_type\t,\n" +
            "usage_type\t,\n" +
            "effect_level\t,\n" +
            "apply_target\t,\n" +
            "audited\t,\n" +
            "publish_date\t,\n" +
            "effective_date\t,\n" +
            "expire_date\t,\n" +
            "created_at\t,\n" +
            "orig_url\t\n)" +
            "select publishno,\n" +
            "title,\n" +
            "content,\n" +
            "publisher,\n" +
            "null,\n" +
            "null,\n" +
            "null,\n" +
            "tax_type,\n" +
            "null,\n" +
            "null,\n" +
            "null,\n" +
            "null,\n" +
            "case when publishdate!='' then STR_TO_DATE(publishdate, '%Y年%m月%d日') end as publishdate,\n" +
            "case when effdate!='' then STR_TO_DATE(effdate, '%Y年%m月%d日') end as effdate,\n" +
            "case when expdate!='' then STR_TO_DATE(expdate, '%Y年%m月%d日') end as expdate,\n" +
            "now(),\n" +
            "a.url\n" +
            " from taxinfo_tmp a\n" +
            " inner join (select distinct min(uid) as uid from taxinfo_tmp group by url) b on(a.uid=b.uid)\n" +
            " inner join (select url ,trim(BOTH ',' FROM concat (\",\",GROUP_CONCAT(tax),\",\")) as tax_type from `taxinfo_tmp` group by url) c on(a.url=c.url)" +
            " where not exists (select orig_url from not_audited_policy d where a.url=d.orig_url)";


    @Autowired
    private Environment env;

    @Autowired
    private JdbcTemplate jdbcTemplate;

    @Bean
    public DataSource dataSource() {
        DruidDataSource dataSource = new DruidDataSource();
        dataSource.setUrl(env.getProperty("spring.datasource.url"));
        dataSource.setUsername(env.getProperty("spring.datasource.username"));//用户名
        dataSource.setPassword(env.getProperty("spring.datasource.password"));//密码
        dataSource.setInitialSize(1);
        dataSource.setMaxActive(20);
        dataSource.setMinIdle(0);
        dataSource.setMaxWait(60000);
        dataSource.setValidationQuery("SELECT 1");
        dataSource.setTestOnBorrow(false);
        dataSource.setTestWhileIdle(true);
        dataSource.setPoolPreparedStatements(false);
        return dataSource;
    }

    @Override
    public void run(String... args) throws Exception {

        RestTemplate template = new RestTemplate();
        template.put(env.getProperty("es.pre_url"),env.getProperty("es.pre_query"));
        JSONObject query = JSONObject.parseObject(es_query);
        long current = 0;
        query.put("size", es_batchsize);
        String tmp_sql = env.getProperty("tmp.sql");
        String[] args_arr = env.getProperty("tmp.es.args").split(",");
        jdbcTemplate.execute(env.getProperty("tmp.truncate.sql"));
        System.out.println("tmp table truncated!");

        long sum=0;
        while (true) {
            long start=System.currentTimeMillis();
            query.put("from", current);
            es_query = query.toJSONString();

            String result = template.postForObject(es_url
                    , es_query, String.class
            );
            long retrieveTime=System.currentTimeMillis();
            System.out.println("current query:"+es_query+" retrieved. "+(retrieveTime-start)+" consumed.");

            JSONObject root = JSONObject.parseObject(result);
            JSONObject hits = root.getJSONObject("hits");

            JSONArray hits_arr = hits.getJSONArray("hits");
            List<Map<String, Object>> data = new ArrayList<>();

            for (int i = 0; i < hits_arr.size(); i++) {
                JSONObject jobj = hits_arr.getJSONObject(i).getJSONObject("_source");
                Map<String, Object> map = new HashMap<String, Object>();

                for (int j = 0; j < args_arr.length; j++) {
                    map.put(args_arr[j], jobj.getString(args_arr[j]));
                }
                data.add(map);
            }

            jdbcTemplate.batchUpdate(tmp_sql, new BatchPreparedStatementSetter() {
                @Override
                public void setValues(PreparedStatement preparedStatement, int i) throws SQLException {
                    for (int j = 0; j < args_arr.length; j++) {
                        String str = String.valueOf(data.get(i).get(args_arr[j]));
                        if (args_arr[j].equals("publisher")) {
                            str = String.join(",", str.split("(\\s+|、+|　+)"));
                        }else if(args_arr[j].equals("content")){
//                            Document doc = Jsoup.parse(str);
//                            List<Element> nodes=doc.getAllElements();
//                            if(nodes.size()==1 ){
//                                Element element=nodes.get(0);
//                                if(element.tag().getName().equals("td")){
//                                    str=element.html();
//                                }
//                            }
                        }
                        preparedStatement.setString(j + 1, str);
                    }
                }

                @Override
                public int getBatchSize() {
                    return data.size();
                }
            });

            long total = hits.getLongValue("total");
            long insertTime=System.currentTimeMillis();
            if (current + es_batchsize < total) {
                current = current + es_batchsize;
                sum=sum+(insertTime-retrieveTime);
                System.out.println(current + " rows of "+total+" inserted in tmp table! "+(insertTime-retrieveTime)+" consumed.");
            } else {
                System.out.println("all "+total + " rows inserted in tmp table! "+(sum+insertTime-retrieveTime)+" consumed.");
                break;
            }
        }
        if(env.getProperty("target.truncate").equals("YES")) {
            jdbcTemplate.execute(env.getProperty("target.truncate.sql"));
            System.out.println("target has been truncated ");
        }
        int total_inserted=jdbcTemplate.update(target_sql);
        System.out.println(total_inserted+"moved to target table!");
    }


}
