package com.chinasoft.reposearch.util;

import com.chinasoft.reposearch.RepoSearchApplication;
import com.chinasoft.reposearch.service.HiveService;
import com.google.gson.Gson;
import com.google.gson.JsonParser;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonToken;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;

import java.io.*;
import java.nio.charset.StandardCharsets;

public class InitHiveDB {
    /**
     * Entry point: optionally truncates the raw repository metadata, (re)creates
     * the Hive repository table, and streams the metadata line-by-line into HDFS.
     *
     * @param args optional single argument:
     *             - "-a": load the full data file without truncation
     *             - a non-negative number: truncate the data to that many entries
     *             - no argument: truncate to 10000 entries (default)
     */
    public static void main(String[] args) {
        String src = "repo_metadata_cut.json";
        boolean needCut = true;
        int cut = 10000;

        // At most one command-line argument is accepted.
        if (args.length > 1) {
            System.out.println("Usage: java InitHiveDB [<cut> | -a]");
            return;
        }

        if (args.length == 1) {
            if (args[0].equals("-a")) {
                // Keep the full data set (no truncation).
                needCut = false;
            } else {
                // Parse the truncation count.
                try {
                    cut = Integer.parseInt(args[0]);
                } catch (NumberFormatException e) {
                    System.out.println("Incorrect cut number");
                    return;
                }
                if (cut < 0) {
                    System.out.println("Cut must not be negative");
                    return;
                }
            }
        }

        try {
            // cutRepoJson is called with -1 to mean "copy everything".
            RepoJsonCutter.cutRepoJson("repo_metadata.json", "repo_metadata_cut.json", needCut ? cut : -1);
        } catch (FileNotFoundException e) {
            System.out.println("File 'repo_metadata.json' not found");
            return;
        } catch (IOException e) {
            System.out.println("Incorrect Json Format");
            return;
        }

        AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(RepoSearchApplication.class);
        try {
            HiveService hiveService = context.getBean(HiveService.class);
            initRepoTable(hiveService);
            loadRepoData(src);
        } finally {
            // Close the Spring context even if loading fails with a RuntimeException.
            context.close();
        }
    }

    /** (Re)creates the Hive repository table by executing the init SQL script. */
    private static void initRepoTable(HiveService hiveService) {
        hiveService.executeSqlFile("sql/init_repo_table.sql");
    }

    /**
     * Streams the repository JSON array from {@code src}, re-serializing each
     * object as a single compact JSON line, and writes the result to the HDFS
     * file {@code /repo/data} (overwriting any existing file).
     *
     * @param src path of the local JSON source file (a top-level JSON array)
     * @throws RuntimeException wrapping any {@link IOException} from the
     *                          local read, the HDFS write, or malformed JSON
     */
    private static void loadRepoData(String src) {
        // Explicit UTF-8 on both ends: the default platform charset would
        // corrupt non-ASCII descriptions/topics on non-UTF-8 JVMs.
        try (JsonReader reader = new JsonReader(new FileReader(src, StandardCharsets.UTF_8));
             FileSystem hdfs = FileSystem.get(new Configuration(true));
             BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(
                     hdfs.create(new org.apache.hadoop.fs.Path("/repo/data"), true), StandardCharsets.UTF_8))) {
            StringBuilder line = new StringBuilder();
            Gson gson = new Gson();
            reader.beginArray();
            while (reader.hasNext()) {
                line.setLength(0);
                appendRecord(reader, gson, line);
                line.append('\n');
                // Fail fast on malformed output before it reaches HDFS.
                JsonParser.parseString(line.toString());
                writer.write(line.toString());
            }
            reader.endArray();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Reads one repository object from {@code reader} and appends it to
     * {@code line} as a compact JSON object. Unrecognized fields are consumed
     * and dropped so the reader never desynchronizes.
     */
    private static void appendRecord(JsonReader reader, Gson gson, StringBuilder line) throws IOException {
        reader.beginObject();
        line.append('{');
        boolean first = true;
        while (reader.hasNext()) {
            String name = reader.nextName();
            String value = readFieldValue(name, reader, gson);
            if (value == null) continue; // unknown field: nothing to emit
            if (!first) line.append(',');
            first = false;
            line.append('"').append(name).append("\":").append(value);
        }
        line.append('}');
        reader.endObject();
    }

    /**
     * Consumes the value of field {@code name} from {@code reader} and returns
     * its JSON text, or {@code null} if the field is not recognized (in which
     * case the value is skipped).
     */
    private static String readFieldValue(String name, JsonReader reader, Gson gson) throws IOException {
        switch (name) {
            // String fields; gson.toJson handles quoting and escaping.
            case "owner", "name", "description", "primaryLanguage", "createdAt", "pushedAt" -> {
                if (reader.peek() == JsonToken.NULL) {
                    reader.nextNull();
                    return "null";
                }
                return gson.toJson(reader.nextString());
            }
            // Integer fields.
            case "stars", "forks", "watchers", "languageCount", "topicCount" -> {
                if (reader.peek() == JsonToken.NULL) {
                    reader.nextNull();
                    return "null";
                }
                return Long.toString(reader.nextLong());
            }
            case "isFork" -> {
                // Handle null consistently with the other scalar fields.
                if (reader.peek() == JsonToken.NULL) {
                    reader.nextNull();
                    return "null";
                }
                return Boolean.toString(reader.nextBoolean());
            }
            // Arrays of {string-name: string, string-name: long} pairs.
            case "languages", "topics" -> {
                return readPairArray(reader, gson);
            }
            default -> {
                reader.skipValue(); // unrecognized field: consume its value and drop it
                return null;
            }
        }
    }

    /**
     * Reads an array of two-field objects (a string field followed by a long
     * field, e.g. language name + size) and returns it as compact JSON text.
     */
    private static String readPairArray(JsonReader reader, Gson gson) throws IOException {
        StringBuilder sb = new StringBuilder("[");
        reader.beginArray();
        boolean first = true;
        while (reader.hasNext()) {
            if (!first) sb.append(',');
            first = false;
            reader.beginObject();
            sb.append("{\"").append(reader.nextName()).append("\":")
              // gson.toJson escapes quotes/backslashes in the value, which a
              // raw append between quote characters would not.
              .append(gson.toJson(reader.nextString())).append(",\"")
              .append(reader.nextName()).append("\":")
              .append(reader.nextLong()).append('}');
            reader.endObject();
        }
        reader.endArray();
        return sb.append(']').toString();
    }
}
