import java.io.BufferedWriter;
import java.io.FileWriter;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.sql.*;
import java.util.*;

/**
 * One-off Presto JDBC batch job: for every distinct {@code level_3} value in
 * {@code hive.mongodb.article_asjc_5} that does not yet have a row in
 * {@code hive.bigdata.article_level_result}, runs the aggregation INSERT in
 * {@link #TEMPLATE_SQL} and writes a per-level success/failure report to
 * {@code article_asjc_5.txt}.
 *
 * @author Xiahu
 * @create 2023/7/18 0018
 */
public class PrestoJDBCTest {
    /**
     * INSERT template. Both {@code %s} placeholders are filled with the same
     * {@code level_3} value — once for the projected literal, once for the
     * WHERE filter. NOTE(review): the value is spliced directly into the SQL
     * text (no parameter binding), so it must only ever come from a trusted
     * source; here it originates from our own query result below.
     */
    final static String TEMPLATE_SQL = "insert into  hive.bigdata.article_level_result\n" +
            "select \n" +
            "        '%s' as level_3\n" +
            "        ,access_interface(cast(MAP_FROM_ENTRIES(array[('papers',cast(array_agg(distinct data) as json))]) as json),'http://10.10.9.83:8081/main') as data\n" +
            "from (\n" +
            "        select \n" +
            "                1 as flag,\n" +
            "                cast(MAP_FROM_ENTRIES(array[\n" +
            "                        ('paper_id',cast(a.eid as json)),\n" +
            "                        ('title',cast(a.title[1] as json)),\n" +
            "                        ('year',cast(a.pubYear[1] as json)),\n" +
            "                        ('authors',a.authors),\n" +
            "                        ('reference',a.references)]) as json) as data\n" +
            "        from(\n" +
            "                select\n" +
            "                         a.eid\n" +
            "                        ,array_agg(distinct b.text) as title\n" +
            "                        ,array_agg(distinct replace(cast(json_extract(publication,'$.pubYear') as varchar) ,'\"',''))  as pubYear\n" +
            "                        ,cast(array_agg(distinct cast(MAP_FROM_ENTRIES(array[\n" +
            "                                ('author_id',cast(c.author_id as json)),\n" +
            "                                ('Family Name',cast(c.family_name as json)),\n" +
            "                                ('Middle Name',cast('' as json)),\n" +
            "                                ('Given Name',cast(c.given_name as json)),\n" +
            "                                ('email',cast(null as json)),\n" +
            "                                ('org',cast(array[c.org] as json))]) as json)) as json) as authors\n" +
            "                        ,cast(array_agg(distinct concat('2-s2.0-',d.itemid)) as json) as references\n" +
            "                from hive.mongodb.article_asjc_5 a\n" +
            "                left join (\n" +
            "                        select  \n" +
            "                                _id as id\n" +
            "                                ,x.text\n" +
            "                        from hive.mongodb.article_asjc_5 a\n" +
            "                        cross join unnest (cast(json_extract(a.titles,'$') as array(row(text varchar)))) as x(text)\n" +
            "                ) b on a._id = b.id\n" +
            "                left join (\n" +
            "                        select  \n" +
            "                                 a._id as id\n" +
            "                                 ,y.auid as author_id\n" +
            "                                 ,y.lastName as family_name \n" +
            "                                 ,y.firstName as given_name\n" +
            "                                 ,json_format(json_array_get(json_extract(x.affiliation,'$.organizations') ,json_array_length(json_extract(x.affiliation,'$.organizations'))-1))  as org\n" +
            "                        from hive.mongodb.article_asjc_5 a\n" +
            "                        cross join unnest (cast(json_extract(a.authorGroups,'$') as array(row(affiliation json,authors json)))) as x(affiliation,authors) \n" +
            "                        cross join unnest (cast(json_extract(x.authors,'$') as array(row(auid varchar,lastName varchar,firstName varchar)))) as y(auid,lastName,firstName)\n" +
            "                )c on  a._id = c.id\n" +
            "                left join (\n" +
            "                        select  \n" +
            "                                 a._id as id\n" +
            "                                 ,x.refinfo\n" +
            "                                 ,json_extract(x.refinfo,'$.itemidList')\n" +
            "                                 ,replace(cast(y.itemid as varchar) ,'\"','') as itemid\n" +
            "                        from hive.mongodb.article_asjc_5 a\n" +
            "                        cross join unnest (cast(json_extract(a.references,'$') as array(row(refinfo json)))) as x(refinfo)\n" +
            "                        cross join unnest (cast(json_extract(json_extract(x.refinfo,'$.itemidList'),'$') as array(row(itemid json)))) as y(itemid)\n" +
            "                )d on  a._id = d.id\n" +
            "                where level_3 = '%s'\n" +
            "                group by eid\n" +
            "        ) a \n" +
            ") group by flag";


    /**
     * Connects to Presto, finds the {@code level_3} values that still lack a
     * result row, executes the templated INSERT for each, and appends one
     * status line per level to {@code article_asjc_5.txt}.
     *
     * @param args unused
     * @throws Exception on driver-load, connection, or discovery-query failure
     *         (per-level INSERT failures are caught, logged, and recorded
     *         instead of aborting the batch)
     */
    public static void main(String[] args) throws Exception {
        // Register the driver BEFORE requesting a connection. The original
        // code called Class.forName after getConnection, which only worked
        // because JDBC 4 auto-discovers drivers via ServiceLoader.
        Class.forName("com.facebook.presto.jdbc.PrestoDriver");

        String url = "jdbc:presto://10.10.9.63:7670";
        Properties properties = new Properties();
        properties.setProperty("user", "presto");
        properties.setProperty("password", "");

        // try-with-resources: previously the connection, statement, result
        // set, and writer all leaked whenever any query threw.
        try (Connection connection = DriverManager.getConnection(url, properties);
             Statement statement = connection.createStatement();
             BufferedWriter bw = Files.newBufferedWriter(
                     Path.of("article_asjc_5.txt"), StandardCharsets.UTF_8)) {

            // Discover level_3 values with no result row yet, smallest groups
            // first so cheap levels finish early. The per-level row count is
            // only used for ordering inside the query, so we keep just the
            // level values (the original stored the unused counts in a map).
            String querySql = "select a.* from (\n" +
                    "select level_3 ,count(*) as num from hive.mongodb.article_asjc_5 group by level_3 \n" +
                    ") a left join hive.bigdata.article_level_result b on a.level_3 = b.level \n" +
                    "where b.data is null\n" +
                    "order by a.num asc";
            List<String> levels = new ArrayList<>();
            try (ResultSet resultSet = statement.executeQuery(querySql)) {
                while (resultSet.next()) {
                    levels.add(resultSet.getString("level_3"));
                }
            }

            // Run the INSERT per level and record the outcome. Failures are
            // written to the report too (the original only printed the stack
            // trace, leaving failed levels invisible in the output file).
            for (String level : levels) {
                String sql = String.format(TEMPLATE_SQL, level, level);
                System.out.println(sql);
                try {
                    statement.execute(sql);
                    bw.write(String.format("%s : %s", level, "OK"));
                } catch (SQLException e) {
                    e.printStackTrace();
                    bw.write(String.format("%s : FAIL (%s)", level, e.getMessage()));
                }
                bw.newLine();
                // Persist progress after every level — this is a long-running
                // job and a crash should not lose the report so far.
                bw.flush();
            }
        }
    }
}
