package com.qf.index;

import cn.hutool.core.util.NumberUtil;
import cn.hutool.db.Db;
import cn.hutool.db.Entity;
import cn.hutool.json.JSONUtil;
import com.alibaba.fastjson.JSONObject;

import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Pattern;

public class TestQ {

    // Collapses runs of ASCII spaces left after markup removal. Compiled once:
    // String.replaceAll(" +", " ") would recompile the regex on every call.
    private static final Pattern MULTI_SPACE = Pattern.compile(" +");

    /**
     * Normalizes an HTML-formatted abstract to plain text: strips a trailing
     * {@code </p>} and a leading {@code <p>}, removes the PubMed-style
     * sub-title markup and paragraph breaks, then collapses repeated spaces
     * and trims. Shared by {@link #main(String[])} and
     * {@link #readEntity}, which previously duplicated this logic.
     *
     * @param abText raw abstract HTML (non-null; may be empty)
     * @return whitespace-normalized plain text
     */
    private static String normalizeAbstract(String abText) {
        if (abText.endsWith("</p>")) {
            abText = abText.substring(0, abText.length() - "</p>".length()).trim();
        }
        if (abText.startsWith("<p>")) {
            abText = abText.substring("<p>".length()).trim();
        }
        abText = abText.replace("<strong class=\"sub-title\">", " ")
                .replace(" </strong>", " ")
                .replace("</p><p>", " ");
        return MULTI_SPACE.matcher(abText).replaceAll(" ").trim();
    }

    /**
     * Ad-hoc smoke test: runs the abbreviation / keyword / MeSH extraction
     * pipeline against one hard-coded PubMed-style abstract and prints the
     * intermediate results ("1.", "2.", "3.") for manual inspection.
     *
     * @throws SQLException if reading the lookup tables fails
     */
    public static void main(String[] args) throws SQLException {

        String abText = "<p><strong class=\"sub-title\"> INTRODUCTION: </strong>Atopic asthma and allergic rhinitis are common chronic inflammatory diseases affecting lower airways and nasal mucosa, respectively. Several reports demonstrated frequent co-occurrence of these two diseases, " +
                "however, the exact molecular mechanism has not been described. The present study aimed to investigate if small non-coding RNA might be responsible for the co-occurrence of asthma and allergic rhinitis in an animal model of allergic airway " +
                "inflammation.</p><p><strong class=\"sub-title\"> MATERIALS AND METHODS: </strong>As an in vivo model of allergic airway inflammation, we used Brown Norway rats exposed intranasally to house dust mite (HDM). Histological analysis, total IgE concentration, eosinophil counts " +
                "and iNOS gene expression were determined to confirm inflammatory changes. Small RNA sequencing in the lung tissue and nasal epithelium was performed with TruSeq Small RNA Library Preparation Kit and analyzed using the BaseSpace tool. Validation of sequencing results was " +
                "performed using qPCR. To assess the functional role of hsa-miR-223-3p, we transfected normal human bronchial epithelial (NHBE) cells with specific LNA-inhibitor and measured phosphorylated protein level of NF-kB with ELISA. Expression analysis of NF-kB pathway-related genes " +
                "was performed using qPCR with SYBR Green and analyzed in DataAssist v3.01. Statistical analysis were done with STATISTICA version 13.</p><p><strong class=\"sub-title\"> RESULTS: </strong>We found 9 miRNA genes differentially expressed in the lungs of allergic rats. In nasal " +
                "epithelium, only rno-miR-184 was upregulated in animals exposed to HDM. Validation with qPCR confirmed increased expression only for rno-miR-223-3p in the lungs from allergic rats. The expression of this miRNA was also increased in normal bronchial epithelial ALI cell culture " +
                "stimulated with IL-13, but not in cells cultured in monolayer due to the low mRNA level of IL13RA1 and IL13RA2. Transfecting NHBE cells with hsa-miR-223-3p inhibitor increased the amount of phosphorylated NF-kB protein level and expression of MUC5AC, CCL24 and TSLP " +
                "genes.</p><p><strong class=\"sub-title\"> CONCLUSIONS: </strong>These findings suggest that miRNAs that regulate allergic inflammation in the lungs and nasal epithelium are specific for upper and lower airways. Furthermore, our study provides new insight on the role of hsa-miR-223-3p, " +
                "that via targeting NF-kB signaling pathway, regulates the expression of MUC5AC, CCL24 and TSLP. Taken together, our study suggests that miR-223-3p is a regulator of allergic inflammation and could potentially be used to develop novel and targeted therapy for asthma.</p>";
        abText = normalizeAbstract(abText);

        // Abbreviations detected directly in the abstract text.
        Set<List<String>> abSetList = new HashSet<>();
        ReadKw.addAbbSet(abText, abSetList);
        System.out.println("1. " + abSetList);

        String newDataName = "scholar_25_02.";
        String meshTable = newDataName + "help_mesh_tree_gcr_0615";
        String keyWordSumTable = newDataName + "key_word_sum_0124";

        // Shared lookup maps populated from the DB tables below.
        Map<String, String> md5Map = new ConcurrentHashMap<>();
        Map<String, String> reKwMap = cList.getReKwMap();
        Map<String, String> keyWordAndMeshMap = new ConcurrentHashMap<>();
        Map<String, String> originalAndMeshMap = new ConcurrentHashMap<>();
        Map<String, String> keyWordAndStandardMap = Bibliometric.getKeyWordAndStandardMap(keyWordSumTable, keyWordAndMeshMap, md5Map, reKwMap, "id", "is_cn`,`sum`,`key_word");


        ReadKw.readMesh(meshTable, "id", "meshName`,`entryTerms", true, 10, md5Map, keyWordAndMeshMap, originalAndMeshMap, reKwMap);
        System.out.println("md5Map.size(): " + md5Map.size() + " keyWordAndStandardMap.size(): " + keyWordAndStandardMap.size() + " keyWordAndMeshMap.size(): " + keyWordAndMeshMap.size() + " originalAndMeshMap.size(): " + originalAndMeshMap.size() + " reKwMap.size(): " + reKwMap.size());

        // Keywords matched in the abstract (skipped when the text is trivially short).
        Set<String> abKeySet = abText.length() > 10 ? ReadKw.getAbKey(abText, keyWordAndStandardMap) : new HashSet<>();
        System.out.println("2. " + abKeySet);
        ReadKw.addSetList(originalAndMeshMap, new ArrayList<>(abKeySet), abSetList, true);
        System.out.println("3. " + abSetList);

    }

    /**
     * Extracts bibliometric features from one article row: cleaned keyword
     * string, normalized abstract, publication types, citation string,
     * languages, author/affiliation breakdowns, and keyword/MeSH matches for
     * the title and abstract.
     *
     * <p>NOTE(review): the results built at the end (kwSetList, tiSetList,
     * abSetList, info, categoryAndInfoList, languageList, ...) are never
     * returned or stored, so this method currently performs lookups and
     * discards the output — it looks unfinished or trimmed; confirm against
     * the full project source before relying on it.
     *
     * @param entry                  DB row carrying fields such as pmid, keyword, article_title, ab_text, jour, pub_year, author_extend, pts, med_nlm_id, pub_date, doi, volume, issue, pages, lan
     * @param jourMergeTable         journal merge lookup table name
     * @param meshTable              MeSH tree lookup table name
     * @param keyWordSumTable        keyword frequency lookup table name
     * @param htLocationChinaTable   province/city lookup table name
     * @param basicHospUnivDeptTable hospital/university/department lookup table name
     * @param wordAndCodeSetMapTable word-to-code-set lookup table name
     * @throws SQLException if any lookup table read fails
     */
    public static void readEntity(Entity entry, String jourMergeTable, String meshTable, String keyWordSumTable, String htLocationChinaTable, String basicHospUnivDeptTable, String wordAndCodeSetMapTable) throws SQLException {

        // Prefix PubMed HTML places before the keyword list; stripped below.
        String s = "<p> <strong class=\"sub-title\"> Keywords: </strong>";
        Set<String> pinYinSet = PmUnit.getPinYinSet();
        Map<String, En.JidInfo> nlmIdAndJidInfoMap = Bibliometric.getNlmIdAndJidInfoMap(jourMergeTable, "id", "nc_nlmid_list`,`jcr_quartile`,`cite_score`,`jcr_if`,`zky_dalei`,`nc_nlm_title_abbreviation`,`ncbi_mesh`,`ncbi_subject");

        Map<String, Set<Integer>> wordAndCodeSetMap = Drug.getWordAndCodeSetMap(wordAndCodeSetMapTable, "id", "word`,`code_set", "");

        // Shared lookup maps populated from the DB tables below.
        Map<String, String> md5Map = new ConcurrentHashMap<>();
        Map<String, String> reKwMap = cList.getReKwMap();
        Map<String, String> keyWordAndMeshMap = new ConcurrentHashMap<>();
        Map<String, String> originalAndMeshMap = new ConcurrentHashMap<>();
        ReadKw.readMesh(meshTable, "id", "meshName`,`entryTerms", true, 10, md5Map, keyWordAndMeshMap, originalAndMeshMap, reKwMap);

        Map<String, String> keyWordAndStandardMap = Bibliometric.getKeyWordAndStandardMap(keyWordSumTable, keyWordAndMeshMap, md5Map, reKwMap, "id", "is_cn`,`sum`,`key_word");

        Map<String, String> languageMap = uSet.getLanguageMap();
        Map<List<String>, Integer> keyWordSumMap = new ConcurrentHashMap<>();
        Map<String, Integer> cnKeyWordSumMap = new ConcurrentHashMap<>();
        List<String> originalWordList = rSet.getWordList(); // 1673 entries total; longer entries first, then shorter; case-sensitive.
        Set<String> smallWordSet = rSet.getSimpWordSet();
        Set<String> backgroundSet = rSet.toLowerCaseSet(rSet.getBackgroundSet());
        Set<String> discussionSet = rSet.toLowerCaseSet(rSet.getDiscussionSet());
        Set<String> methodsSet = rSet.toLowerCaseSet(rSet.getMethodsSet());
        Set<String> resultsSet = rSet.toLowerCaseSet(rSet.getResultsSet());
        Set<String> chinaSet = cList.chinaSet();
        Set<String> citySet = new HashSet<>(); // filled as a side effect of getProvCitySet below
        Set<String> subjectSet = cList.getSubjectSet();
        Set<String> provCitySet = Dept.getProvCitySet(htLocationChinaTable, "id", "prov`,`name", citySet);
        Map<String, String> standardMap = new ConcurrentHashMap<>();
        standardMap.put("department medical", "Department Medical");
        Map<String, Set<String>> deptAndInfoSetMap = new ConcurrentHashMap<>();
        Map<String, Integer> deptAndSumMap = new ConcurrentHashMap<>();
        Map<String, Set<String>> unitAndInfoSetMap = new ConcurrentHashMap<>();
        Map<String, Integer> unitAndSumMap = new ConcurrentHashMap<>();
        Map<String, String> countryMap = cList.getCountryMap();
        Set<String> acadSet = uSet.getAcadSet();
        Set<String> centerSet = uSet.getCenterSet();
        Set<String> collSet = uSet.getCollSet();
        Set<String> hospSet = uSet.getHospSet();
        Set<String> univSet = uSet.getUnivSet();
        Set<String> removeSet = uSet.getRemoveSet();
        Set<String> dellSet = uSet.getDellSet();
        Set<String> toDeptSet = uSet.toDept();
        Map<String, En.FuDanStemEdu> fuDanStemEduMap = Guidelines.getFuDanStemEduMap(basicHospUnivDeptTable, "id", "info`,`fudan`,`stem`,`edu");


        Integer pmId = entry.getInt("pmid");
        // Strip the "Keywords:" HTML wrapper only when both ends match, otherwise leave untouched.
        String keyword = Utils.getStrField(entry, "keyword");
        keyword = keyword.startsWith(s) && keyword.endsWith("</p>") ? keyword.substring(s.length(), keyword.length() - "</p>".length()).trim() : keyword;
        String title = Utils.getStrField(entry, "article_title");
        String abText = normalizeAbstract(Utils.getStrField(entry, "ab_text"));
        String journal = Utils.getStrField(entry, "jour");
        String pubYear = Utils.getStrField(entry, "pub_year");
        // Fall back to 1900 when pub_year is not a 4-digit integer.
        int year = (pubYear.trim().length() == 4 && NumberUtil.isInteger(pubYear.trim())) ? Integer.parseInt(pubYear.trim()) : 1900;
        String authorExtend = entry.getStr("author_extend");
        Set<String> mailSet = new HashSet<>();
        List<String> authList = Bibliometric.getPmAuthList(authorExtend, mailSet);
        String pts = entry.getStr("pts");
        List<En.PubType> pubTypes = JSONObject.parseArray((null == pts || pts.isEmpty()) ? "[]" : pts, En.PubType.class);
        Set<String> ptSet = new HashSet<>(); // publication types
        for (En.PubType pubType : pubTypes) {
            String typeTitle = pubType.getTitle().trim();
            if (!typeTitle.isEmpty()) {
                ptSet.add(typeTitle);
            }
        }
        String nlmId = Utils.getStrField(entry, "med_nlm_id");
        int isCns = Utils.is6DaJournals(nlmId, journal, "", "");
        String pubDate = Utils.getStrField(entry, "pub_date").replace("--", "-").trim();
        pubDate = pubDate.endsWith("-") ? pubDate.substring(0, pubDate.length() - 1) : pubDate;
        String di = Utils.dellEnd(Utils.getStrField(entry, "doi"));
        String volume = Utils.getStrField(entry, "volume");
        String issue = Utils.getStrField(entry, "issue");
        String pages = Utils.getStrField(entry, "pages");
        di = di.endsWith(".") ? di.substring(0, di.length() - 1) : di;
        // Vancouver-style citation string; DOI appended only when it looks plausible (> 6 chars).
        String info = (journal + ". " + (pubDate + ";") + volume + (issue.isEmpty() ? ":" : "(" + issue + "):") + (pages + ". ") + (di.length() > 6 ? ("doi: " + di + ". ") : ""));
        List<En.CategoryAndInfo> categoryAndInfoList = Drug.getDrug(title, keyword, abText, wordAndCodeSetMap);
        List<En.C1Auth> pmC1ListSort = Utils.getPmC1ListSort(authorExtend, pinYinSet);


        // Split the language field on its several separator variants; "ĺļ" is an
        // intermediate delimiter assumed never to occur in real data.
        String language = Utils.getStrField(entry, "lan");
        String[] split = language.replace(";-;", "ĺļ").replace("; ", "ĺļ").replace(";", "ĺļ").split("ĺļ");
        List<String> languageList = new ArrayList<>();
        for (String p : split) {
            if (p.trim().length() > 2) {
                // Locale.ROOT keeps the map-key lookup locale-independent (Turkish-i hazard);
                // languageMap keys are presumably lowercase — confirm against uSet.getLanguageMap.
                languageList.add(languageMap.getOrDefault(p.toLowerCase(Locale.ROOT).trim(), p.trim()));
            }
        }

        Guidelines.setC1List(pmC1ListSort, standardMap, acadSet, centerSet, collSet, hospSet, univSet, removeSet, countryMap, deptAndSumMap, dellSet,
                chinaSet, provCitySet, citySet, subjectSet, deptAndInfoSetMap, unitAndInfoSetMap, unitAndSumMap, toDeptSet, fuDanStemEduMap);

        // Keyword sets from the article's own keyword field.
        Set<List<String>> kwSetList = new HashSet<>();
        if (!keyword.isEmpty()) {
            List<String> kwList = new ArrayList<>(Arrays.asList(keyword.split("; ")));
            ReadKw.addSetList(originalAndMeshMap, kwList, kwSetList, false);
        }

        // Keyword/abbreviation sets derived from the title.
        Set<List<String>> tiSetList = new HashSet<>();
        Set<String> tiKeySet = title.length() > 4 ? ReadKw.getTiKey(title, keyWordAndStandardMap) : new HashSet<>();
        ReadKw.addSetList(originalAndMeshMap, new ArrayList<>(tiKeySet), tiSetList, true);
        ReadKw.addAbbSet(title, tiSetList);

        // Keyword/abbreviation sets derived from the abstract.
        Set<List<String>> abSetList = new HashSet<>();
        Set<String> abKeySet = abText.length() > 10 ? ReadKw.getAbKey(abText, keyWordAndStandardMap) : new HashSet<>();
        ReadKw.addSetList(originalAndMeshMap, new ArrayList<>(abKeySet), abSetList, true);
        ReadKw.addAbbSet(abText, abSetList);

    }


}
