package common.main.google;

import com.google.gson.Gson;
import common.http.HtmlInfo;
import common.http.SimpleHttp;
import common.system.FileOperation;
import common.utils.DomTree;
import common.utils.MD5Util;
import common.utils.StringUtil;
import net.sf.json.JSONException;
import net.sf.json.JSONObject;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.jdbc.Null;
import org.apache.log4j.Logger;
import org.w3c.dom.DocumentFragment;
import org.w3c.dom.NodeList;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class ScholarPaperListStartMain {

    private  static Logger logger = Logger.getLogger(ScholarPaperListStartMain.class);
    // Shared, mutable request/response holder reused by every HTTP call in this
    // class (list pages, author pages, detail pages) — not thread-safe.
    private  static HtmlInfo html = new HtmlInfo();
    // Article sites: publisher host names used by checkSite to classify a paper's URL.
    private static String Springer = "link.springer.com";
    private static String Elsevier = "www.sciencedirect.com";
    private static String WILEY = "onlinelibrary.wiley.com";
    private static String TaylorAndFrancis = "www.tandfonline.com";
    private static String IEEE = "ieeexplore.ieee.org";

    /**
     * Entry point: for each keyword in config/googleKeyword.txt, crawls Google
     * Scholar result pages (through Config.httpProxy) until Config.searchTopKeyNum
     * results are collected, extracts paper metadata field-by-field, filters out
     * unknown sites and already-saved duplicates, and persists rows via
     * SaveDataToSql.
     *
     * @param arg unused
     * @throws Exception propagated from HTTP requests, parsing, or interrupted sleeps
     */
    public static void main(String []arg) throws  Exception{

        Config.init();
        final String entrance_url = Config.entranceUrl;//"https://scholar.google.com.hk/scholar?hl=zh-CN&as_sdt=0%2C5&q=<keyword>";

        //  Generative+adversarial+networks


        int crawler_top_k = Config.searchTopKeyNum;//top k

        String content = FileOperation.read("config/googleKeyword.txt");


        String []lines = content.split("\n");


        SimpleHttp http = new SimpleHttp();


        html.setEncode("utf-8");
        //String ua = "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36 QIHU 360SE";
        String ua = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36";

        html.setUa(ua);

        html.setProxy(Config.httpProxy); // configure the HTTP proxy

        // Pre-load MD5 hashes of already-saved rows so duplicates are skipped.
        String allMD5 = SaveDataToSql.getAllGooglePaperSearchTopKMD5();

        String line = null;
        nextKeyword: for(int a = 0;a < lines.length;a++){
            line = lines[a];
            String searchKeyword  = line.trim();

            // Skip comment lines ('#') and blank/one-character lines.
            if(searchKeyword.startsWith("#")||searchKeyword.length()<=1){
                continue;
            }


            logger.info("searchKeyword:"+searchKeyword);
            // Build the search URL: collapse double spaces, '+'-join words, drop quotes.
            String url = entrance_url.replace("<keyword>",searchKeyword.replace("  "," ").replace(" ","+"))
                    .replace("\"","");

            int dataCount = 0;
            // Follow "next page" links until top-k results are collected or no next page.
            while(url!=null && dataCount <= crawler_top_k) {

                logger.info("url:"+url);
                html.setOrignUrl(url);

                try{
                    http.simpleGet(html);  // perform the HTTP request
                }catch (IllegalArgumentException e){
                    // Keyword contains characters that break the URL; record it and move on.
                    logger.info("关键词 "+ searchKeyword +" 含有特殊字符，跳过");
                    FileOperation.appendWrite(line + "\n","config/notExecute.txt");
                    Thread.sleep(15 * 1000);
                    continue nextKeyword;
                }catch (NotFoundException e){
                    logger.info(e.getMessage() + "   1分钟后重试");
                    Thread.sleep(60 * 1000);
                    // NOTE(review): `continue` targets the while loop (retries the same
                    // url); the a-- additionally makes the outer for re-run this entire
                    // keyword once paging ends — confirm that double effect is intended.
                    a--;
                    continue;
                }

//                String path  = "C:/Users/lenovo/Desktop/1.html";
//                String htmlcontent = FileOperation.read(path);
//                html.setContent(htmlcontent);

                String htmlContent = html.getContent();//data returned by the HTTP request
                if(htmlContent==null){
                    logger.warn("获取列表页数据失败，请检查代理是否正确，网络是否连通，15秒后自动重试");
                    Thread.sleep(15 * 1000);
                    continue;
                    //System.exit(-1);
                }
                DocumentFragment node = DomTree.getNode(htmlContent, html.getEncode());
                List<GooglePaperData> list = new ArrayList<>();

                // Populate the paper list field-by-field; the extract* helpers match
                // their nodes back to papers via the paperInfo text.
                extractPaperInfoList(list, node, GoogleScholarXpath.paperInfo);
                extractTitleList(list, node, GoogleScholarXpath.title);
                extractBriefList(list, node, GoogleScholarXpath.brief);
                extractAuthorList(list, node, GoogleScholarXpath.authors);
                extractAuthorUrlList(list, node, GoogleScholarXpath.authorsUrl);
                extractPubYearList(list, node, GoogleScholarXpath.pubYear);
                extractTitleUrlList(list,allMD5,searchKeyword);
                extractCiteList(list,node,GoogleScholarXpath.citeNum);
                extractAuthorHIndex(list,node,GoogleScholarXpath.authorsUrl,allMD5,searchKeyword);

                // Drop entries whose title marks a [PDF]/[图书] pseudo-result, or whose
                // MD5(url + keyword) was already saved. Iterates backwards so removal
                // does not shift unvisited indices.
                for(int i = list.size() - 1;i >= 0;--i){
                    GooglePaperData data = list.get(i);
                    String dataMd5 = MD5Util.MD5(data.getUrl() + searchKeyword);
                    if(data.getTitle().contains("[PDF]")||data.getTitle().contains("[图书]") || allMD5.contains(dataMd5)){
                        list.remove(i);
                    }
                }
                // Assign orderId; remove papers hosted on sites checkSite doesn't know.
                for(int i = 0;i < list.size();++i){
                    GooglePaperData data = list.get(i);
                    // 1-based rank within this keyword's overall result stream
                    data.setOrderId(dataCount + i + 1);
                    if(checkSite(data.getUrl()) == 0){
                        list.remove(i);
                        --i;
                        dataCount++;
                    }else{
                        // Strip a leading "[HTML] "-style tag from the title, if present.
                        if(data.getTitle().contains("]")){
                            try{
                                // If the split yields no second part, leave the title as-is.
                                data.setTitle(data.getTitle().split("] ")[1]);
                            }catch (ArrayIndexOutOfBoundsException e){

                            }
                        }
                    }
                }

                url = extractNextUrl(node, GoogleScholarXpath.nextURL);
                for(GooglePaperData data:list){
                    data.setSearchKeyword(searchKeyword);
                    data.setMd5(MD5Util.MD5(data.getUrl() + data.getSearchKeyword()));//generate dedup md5
                }
                SaveDataToSql.insertGooglePaperSearchTopK(list);
                logger.info("save data size:"+list.size() + "  wait 15s");
                dataCount+=list.size();
                Thread.sleep(1000*15);  //sleep 15s
            }

        }
        logger.info("all search crawler top-k is over!!!");
    }

    /**
     * Classifies an article URL by publisher site.
     *
     * @param titleUrl the article landing-page URL
     * @return 1=Springer, 2=Elsevier, 3=Wiley, 4=Taylor&amp;Francis, 5=IEEE,
     *         0 when the URL matches none of the known hosts
     */
    private static int checkSite(String titleUrl){
        if(titleUrl.contains(Springer)){
            return 1;
        }
        if(titleUrl.contains(Elsevier)){
            return 2;
        }
        if(titleUrl.contains(WILEY)){
            return 3;
        }
        if(titleUrl.contains(TaylorAndFrancis)){
            return 4;
        }
        if(titleUrl.contains(IEEE)){
            return 5;
        }
        return 0;
    }

    /**
     * Creates one GooglePaperData per search-result node, storing the node's
     * full trimmed text as paperInfo. The other extract* helpers later match
     * their nodes back to papers through this text.
     */
    private static void extractPaperInfoList(List<GooglePaperData> list, DocumentFragment node, String xpath){
        logger.info("extractPaperInfoList");
        NodeList resultNodes = DomTree.commonList(xpath, node);
        int count = resultNodes.getLength();
        for(int idx = 0; idx < count; idx++){
            GooglePaperData paper = new GooglePaperData();
            paper.setPaperInfo(resultNodes.item(idx).getTextContent().trim());
            list.add(paper);
        }
    }

    /**
     * Fills in citation counts for every paper whose paperInfo contains the
     * cite-text node (text like "被引用次数：N", split on the fullwidth colon).
     *
     * Fix: the original did itemContent.split("：")[1] and Integer.parseInt with
     * no guards, so a node lacking the fullwidth colon threw
     * ArrayIndexOutOfBoundsException and non-numeric text threw
     * NumberFormatException — either crashed the entire crawl. Malformed nodes
     * are now logged and skipped.
     */
    private static void extractCiteList(List<GooglePaperData> list,DocumentFragment node,String xpath){
        logger.info("extractCiteList");
        NodeList nl = DomTree.commonList(xpath,node);

        for(int i = 0;i < nl.getLength(); i++){
            String itemContent = nl.item(i).getTextContent();
            if("".equals(itemContent)){
                continue;
            }
            // "：" is the fullwidth colon produced by the Chinese-locale results page.
            String[] parts = itemContent.split("：");
            if(parts.length < 2){
                continue; // no citation number present in this node
            }
            int citeNum;
            try{
                citeNum = Integer.parseInt(parts[1].trim());
            }catch (NumberFormatException e){
                logger.warn("extractCiteList: unparsable cite number in \"" + itemContent + "\"");
                continue;
            }
            for(int j = 0;j < list.size();j++){
                GooglePaperData data = list.get(j);
                if(data.getPaperInfo().contains(itemContent)){
                    data.setCiteNum(citeNum);
                }
            }
        }
    }

    /**
     * For each author-link node matching a not-yet-saved paper on a known site,
     * fetches the author's Google Scholar profile page and appends
     * "authorName<:>hindex;" segments to the paper's authorsIndexH field.
     *
     * NOTE(review): this method reuses and overwrites the shared static `html`
     * holder (URL and content), so the caller's previously fetched list page
     * content is gone after this returns — confirm callers rely only on the
     * already-parsed `node`.
     *
     * @param allMD5        concatenation of saved MD5 hashes, used to skip duplicates
     * @param searchKeyword current keyword, part of the dedup MD5
     * @throws InterruptedException if a rate-limit sleep is interrupted
     */
    private static void extractAuthorHIndex(List<GooglePaperData> list,DocumentFragment node,String xpath,String allMD5,String searchKeyword) throws InterruptedException {
        logger.info("extractAuthorHIndex");

        NodeList nl = DomTree.commonList(xpath,node);

        for(int i = 0;i < nl.getLength();i++){
            String itemContent = nl.item(i).getTextContent();
            for(int j = 0;j < list.size();j++){
                GooglePaperData data = list.get(j);
                // Only fetch the profile when: paper is on a known site, this author
                // text belongs to this paper, and the paper wasn't saved before.
                if(checkSite(data.getUrl()) != 0 && data.getPaperInfo().contains(itemContent) && !allMD5.contains(MD5Util.MD5(data.getUrl() + searchKeyword))){
                    String authorUrl = nl.item(i).getAttributes().getNamedItem("href").getTextContent();
                    String authorHIndex = data.getAuthorsIndexH() + itemContent + "<:>" ;
                    SimpleHttp http = new SimpleHttp();
                    html.setOrignUrl("https://scholar.google.com.hk" + authorUrl);
                    logger.info("15s 后开始获取作者hindex");
                    // Rate-limit before hitting the author profile page.
                    Thread.sleep(15 * 1000);
                    http.simpleGet(html);
                    DocumentFragment authorNode = DomTree.getNode(html.getContent(),"utf-8");
                    NodeList authorNodeList = null;

                    try{
                        authorNodeList = DomTree.commonList(GoogleScholarXpath.AuthorInfo_hIndex,authorNode);
                    }catch (NullPointerException e){
                        // Profile page failed to load/parse; wait and move on to the
                        // next paper without touching authorsIndexH.
                        logger.info("获取作者信息时，空指针异常，等待15s");
                        Thread.sleep(15 * 1000);
                        continue;
                    }
                    if(authorNodeList.getLength() > 0){
                        String hindex = authorNodeList.item(0).getTextContent();
                        authorHIndex = authorHIndex + hindex + ";";
                    }
                    data.setAuthorsIndexH(authorHIndex);
                }
            }
        }
    }

    /**
     * Assigns each brief/snippet text to the first matching paper whose
     * paperInfo contains it. The inner scan starts at the snippet's own index
     * so earlier papers are never re-matched.
     */
    private static void extractBriefList(List<GooglePaperData> list, DocumentFragment node, String xpath){
        logger.info("extractBriefList");
        NodeList briefNodes = DomTree.commonList(xpath, node);
        int limit = Math.min(briefNodes.getLength(), list.size());
        for(int pos = 0; pos < limit; pos++){
            String briefText = briefNodes.item(pos).getTextContent();
            for(int idx = pos; idx < list.size(); idx++){
                GooglePaperData paper = list.get(idx);
                if(paper.getPaperInfo().contains(briefText)){
                    paper.setBrief(briefText.trim());
                    break;
                }
            }
        }
    }
    /**
     * Assigns each author line to the first matching paper whose paperInfo
     * contains it; text after the first '-' (venue/year suffix) is dropped.
     */
    private static void extractAuthorList(List<GooglePaperData> list, DocumentFragment node, String xpath){
        logger.info("extractAuthorList");
        NodeList authorNodes = DomTree.commonList(xpath, node);
        int limit = Math.min(authorNodes.getLength(), list.size());
        for(int pos = 0; pos < limit; pos++){
            String authorText = authorNodes.item(pos).getTextContent();
            for(int idx = pos; idx < list.size(); idx++){
                GooglePaperData paper = list.get(idx);
                if(!paper.getPaperInfo().contains(authorText)){
                    continue;
                }
                String authors = authorText.contains("-")
                        ? authorText.split("-")[0]
                        : authorText;
                paper.setAuthors(authors.trim());
                break;
            }
        }
    }
    /**
     * Builds a name -> Google Scholar profile URL map from the author-link
     * nodes, then for each paper appends "name:url" for every comma-separated
     * author that has a profile link.
     */
    private static void extractAuthorUrlList(List<GooglePaperData> list, DocumentFragment node, String xpath){
        logger.info("extractAuthorUrlList");
        NodeList linkNodes = DomTree.commonList(xpath, node);
        Map<String,String> profileByAuthor = new HashMap<>();

        for(int idx = 0; idx < linkNodes.getLength(); idx++){
            String name = linkNodes.item(idx).getTextContent().trim();
            String href = linkNodes.item(idx).getAttributes().getNamedItem("href").getTextContent();
            String absolute = "https://scholar.google.com.hk" + href.trim();
            profileByAuthor.put(name, absolute.trim());
        }

        for(GooglePaperData paper : list){
            String authorField = paper.getAuthors();
            if(authorField == null || authorField.isEmpty()){
                continue;
            }
            StringBuilder urls = new StringBuilder();
            for(String rawAuthor : authorField.split(",")){
                String author = rawAuthor.trim();
                String profile = profileByAuthor.get(author);
                if(profile != null){
                    urls.append(author).append(":").append(profile);
                }
            }
            paper.setAuthorsUrl(urls.toString());
        }
    }
    /**
     * Extracts a 4-digit publication year (19xx/20xx) from each matching info
     * node and stores it on the first paper whose paperInfo contains the text.
     * Papers with no recognizable year get pubYear = 0.
     *
     * Fix: also guards against StringUtil.extractOne returning null — the
     * original only handled the empty-string case and would NPE on a null
     * result (extractOne's no-match contract isn't visible here; this handles
     * both possibilities).
     */
    private static void extractPubYearList(List<GooglePaperData> list,DocumentFragment node,String xpath){
        logger.info("extractPubYearList");
        NodeList nl = DomTree.commonList(xpath, node);
        for(int i=0;i<nl.getLength()&&i<list.size();i++) {
            String itemContent = nl.item(i).getTextContent();
            for(int k=i;k<list.size();k++) {
                GooglePaperData data = list.get(k);
                if (data.getPaperInfo().contains(itemContent)) {
                    String yearStr = StringUtil.extractOne(itemContent,"(19|20)\\d{2}");
                    if(yearStr == null || "".equals(yearStr.trim())){
                        data.setPubYear(0); // no year found in this node
                    }else{
                        data.setPubYear(Integer.parseInt(yearStr.trim()));
                    }
                    break;
                }
            }
        }
    }

    /**
     * Returns the absolute URL of the "next page" link, or null when the
     * results page has no next link. If several nodes match, the last one wins.
     */
    private static String extractNextUrl(DocumentFragment node, String xpath){
        logger.info("extractNextUrl");
        NodeList linkNodes = DomTree.commonList(xpath, node);
        String lastHref = null;
        for(int idx = 0; idx < linkNodes.getLength(); idx++){
            lastHref = linkNodes.item(idx).getTextContent();
        }
        if(lastHref == null || lastHref.isEmpty()){
            return null;
        }
        return "https://scholar.google.com.hk" + lastHref.trim();
    }

    /**
     * Pairs each title node with the i-th paper: stores the title text and
     * resolves the title's href through the per-row url xpath (the "<n>"
     * placeholder is 1-based). If several url nodes match a row, the last
     * match wins.
     *
     * Cleanup: removed long-dead commented-out prefix-stripping code — the
     * "[HTML] "-style title prefix is stripped later in main().
     */
    private static void extractTitleList(List<GooglePaperData> list,DocumentFragment node,String xpath){
        logger.info("extractTitleList");
        NodeList nl = DomTree.commonList(xpath, node);
        for(int i=0;i<nl.getLength()&&i<list.size();i++) {
            String itemContent = nl.item(i).getTextContent();
            GooglePaperData data = list.get(i);
            // xpath template addresses the i-th result row (1-based).
            NodeList titleUrlNodeList = DomTree.commonList(GoogleScholarXpath.url.replace("<n>", String.valueOf(i + 1)), node);
            String titleUrl = "";
            for(int j = 0;j < titleUrlNodeList.getLength();j++){
                titleUrl = titleUrlNodeList.item(j).getTextContent(); // last match wins
            }
            data.setUrl(titleUrl);
            data.setTitle(itemContent);
        }
    }

    /**
     * Fetches an article detail page and parses it into a DOM fragment.
     * Taylor &amp; Francis (site 4) is fetched without the proxy; every other
     * site goes through Config.httpProxy.
     *
     * Fix: the original retried a failed fetch by calling itself recursively
     * with no depth bound, which can overflow the stack during a prolonged
     * network/proxy outage. Retrying is now an ordinary loop with the same
     * 15-second back-off.
     *
     * @param url detail-page URL; embedded double quotes are stripped first
     * @return parsed DOM of the fetched page (never null)
     * @throws InterruptedException if the retry sleep is interrupted
     * @throws NotFoundException    propagated from SimpleHttp.simpleGet
     */
    private static DocumentFragment getDetailsNode(String url) throws InterruptedException , NotFoundException{
        logger.info("请求文章详细信息的url:" + url);
        // Strip double quotes that would break the request URL.
        url = url.replace("\"","");
        SimpleHttp http = new SimpleHttp();
        html.setEncode("utf-8");
        String ua = "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36 QIHU 360SE";
        html.setUa(ua);
        if(checkSite(url) != 4){
            html.setProxy(Config.httpProxy); // use the configured proxy
        }else{
            html.setProxy(null); // T&F is reachable without the proxy
        }
        html.setOrignUrl(url);
        while(true){
            http.simpleGet(html);  // perform the HTTP request
            String htmlContent = html.getContent();
            if(htmlContent != null){
                return DomTree.getNode(htmlContent, html.getEncode());
            }
            logger.warn("获取详情页数据失败，请检查代理是否正确，网络是否连通，15秒后自动重试");
            Thread.sleep(15 * 1000);
        }
    }

    /**
     * Scrapes a Springer article page for journal name, author-to-affiliation
     * numbers, affiliation names, keywords, and references.
     *
     * @param dataNode parsed DOM of the Springer detail page
     * @return map with keys: journal, authorAffNo, affName, articleKeywords, references
     */
    private static Map<String,String> getSpringerDetailsInfo(DocumentFragment dataNode){

        // Journal: first matching node, or "" when absent.
        NodeList journalNodes = DomTree.commonList(GoogleScholarXpath.Springer_journal, dataNode);
        String journal = journalNodes.getLength() > 0 ? journalNodes.item(0).getTextContent() : "";

        // Author -> affiliation numbers, formatted "name:no1,no2,;name:...".
        NodeList authorNodes = DomTree.commonList(GoogleScholarXpath.Springer_AuthorsName, dataNode);
        StringBuilder authorAffNo = new StringBuilder();
        for(int i = 0; i < authorNodes.getLength(); i++){
            authorAffNo.append(authorNodes.item(i).getTextContent()).append(":");
            NodeList affNoNodes = DomTree.commonList(
                    GoogleScholarXpath.Springer_AuthorsAffNo.replace("<n>", String.valueOf(i + 1)), dataNode);
            for(int j = 0; j < affNoNodes.getLength(); j++){
                authorAffNo.append(affNoNodes.item(j).getTextContent()).append(",");
            }
            authorAffNo.append(";");
        }

        // Affiliation names, formatted "1:nameA,nameB,;2:...".
        NodeList affCountNodes = DomTree.commonList(GoogleScholarXpath.Springer_AffiliationsNum, dataNode);
        StringBuilder affName = new StringBuilder();
        for(int i = 0; i < affCountNodes.getLength(); i++){
            affName.append(i + 1).append(":");
            NodeList affNameNodes = DomTree.commonList(
                    GoogleScholarXpath.Springer_AffiliationName.replace("<n>", String.valueOf(i + 1)), dataNode);
            for(int j = 0; j < affNameNodes.getLength(); j++){
                affName.append(affNameNodes.item(j).getTextContent()).append(",");
            }
            affName.append(";");
        }

        // Keywords, ';'-joined.
        NodeList keywordNodes = DomTree.commonList(GoogleScholarXpath.Springer_Keywords, dataNode);
        StringBuilder keywords = new StringBuilder();
        for(int i = 0; i < keywordNodes.getLength(); i++){
            keywords.append(keywordNodes.item(i).getTextContent()).append(";");
        }

        // References, ';'-joined; each entry's leading span text is removed
        // when such a span exists for that reference (1-based <n>).
        NodeList refNodes = DomTree.commonList(GoogleScholarXpath.Springer_references, dataNode);
        StringBuilder references = new StringBuilder();
        for(int i = 0; i < refNodes.getLength(); i++){
            String refText = refNodes.item(i).getTextContent();
            NodeList spanNodes = DomTree.commonList(
                    GoogleScholarXpath.Springer_references_span.replace("<n>", String.valueOf(i + 1)), dataNode);
            if(spanNodes.getLength() > 0){
                refText = refText.replace(spanNodes.item(0).getTextContent(), "");
            }
            references.append(refText).append(";");
        }

        Map<String,String> result = new HashMap<>();
        result.put("journal", journal);
        result.put("authorAffNo", authorAffNo.toString());
        result.put("affName", affName.toString());
        result.put("articleKeywords", keywords.toString());
        result.put("references", references.toString());
        return result;
    }

    /**
     * Scrapes an Elsevier (ScienceDirect) article page: journal, keywords,
     * affiliation names, author-to-affiliation mapping, and references.
     * Author/affiliation data comes from the page's embedded JSON blob;
     * references require a second HTTP request using the article's pii and
     * entitled token.
     *
     * NOTE(review): this method reuses the shared static `html` holder for the
     * references request, clobbering the previously fetched page content.
     *
     * @param dataNode parsed DOM of the detail page
     * @return map with keys: journal, articleKeywords, affName, authorAffNo, references
     * @throws InterruptedException if the rate-limit sleep is interrupted
     */
    private static Map<String,String> getElsevierDetailsInfo(DocumentFragment dataNode) throws InterruptedException {
        NodeList journalNodeList = DomTree.commonList(GoogleScholarXpath.Elsevier_journal,dataNode);
        String journal = "";
        // NOTE(review): loop body always reads item(0), so journal is just the
        // first match (if any) — item(i) may have been intended.
        for(int i = 0; i < journalNodeList.getLength();i++){
            journal = journalNodeList.item(0).getTextContent();
        }
        // Keywords, ';'-joined.
        NodeList affNodeList = DomTree.commonList(GoogleScholarXpath.Elsevier_Keywords,dataNode);
        String articleKeywords = "";
        for(int i = 0;i < affNodeList.getLength();i++){
            String keywordsTmp = affNodeList.item(i).getTextContent();
            articleKeywords = articleKeywords + keywordsTmp + ";";
        }
        // Affiliation info and author<->affiliation mapping come from the page's
        // embedded JSON metadata blob. "#name"/"$$"/"_"/"$" are the keys of
        // ScienceDirect's XML-as-JSON encoding.
        String affiliationNameJson = DomTree.commonList(GoogleScholarXpath.Elsevier_AffiliationName,dataNode).item(0).getTextContent();
        Gson gson = new Gson();
        Map jsonStr = gson.fromJson(affiliationNameJson,Map.class);
        List<Map> list = ((List)((Map)((List)((Map)jsonStr.get("authors")).get("content")).get(0)).get("$$"));
        String affName = "";
        String authorAffNo = "";
        for(int i = 0;i < list.size();i++){
            if("affiliation".equals(list.get(i).get("#name"))){
                List<Map> list1 = ((List<Map>)list.get(i).get("$$"));
                String affNameTmp = "";
                if (list1.size() > 1){
                    // label + textfn pair: "label:affiliationText"
                    for(int j = 0;j < list1.size();j++){
                        Map map = list1.get(j);
                        if(map.get("#name").equals("label")){
                            affNameTmp = map.get("_") + ":" + affNameTmp;
                        }else if(map.get("#name").equals("textfn")){
                            affNameTmp = affNameTmp + map.get("_");
                        }
                    }
                }else if (list1.size() == 1){
                    // Unlabelled affiliation: plain text only.
                    affNameTmp = (String)list1.get(0).get("_");
                }
                affName = affName + affNameTmp + ";";
            }else if("author".equals(list.get(i).get("#name"))){
                List<Map> authorList = ((List<Map>)list.get(i).get("$$"));
                String authorAffNoTmp = "";
                for(int j = 0;j < authorList.size();j++){
                    if("given-name".equals(authorList.get(j).get("#name"))){
                        authorAffNoTmp = authorList.get(j).get("_") + authorAffNoTmp;
                    }else if("surname".equals(authorList.get(j).get("#name"))){
                        authorAffNoTmp = authorAffNoTmp + " " + authorList.get(j).get("_") + ":";
                    }else if("cross-ref".equals(authorList.get(j).get("#name")) && (
                            ((Map<String,String>)(authorList.get(j).get("$"))).get("refid").startsWith("AFF") ||
                                    ((Map<String,String>)(authorList.get(j).get("$"))).get("refid").startsWith("aff"))){

                        // cross-ref to an AFF*/aff* id carries the affiliation numbers.
                        List<Map> affNoList = (List<Map>)authorList.get(j).get("$$");
                        if(affNoList != null){
                            for(int k = 0;k < affNoList.size();k++){
                                try {
                                    authorAffNoTmp = authorAffNoTmp + affNoList.get(k).get("_") + ",";
                                }catch (NullPointerException e){
                                    continue;
                                }
                            }
                        }
                    }
                    if(j == (authorList.size() - 1)){
                        authorAffNoTmp = authorAffNoTmp + ";";
                    }
                }
                authorAffNo = authorAffNo + authorAffNoTmp;
            }
        }

        // References live behind a separate JSON endpoint keyed by pii + token.
        String token = (String)((Map)jsonStr.get("article")).get("entitledToken");
        String pii = (String) ((Map) jsonStr.get("article")).get("pii");
        SimpleHttp http = new SimpleHttp();
        html.setOrignUrl("www.sciencedirect.com/sdfe/arp/pii/"+ pii +"/references?entitledToken=" + token);
        logger.info("Elsevier站点获取参考文献信息需要请求url，等待15s后开始获取");
        Thread.sleep(15 * 1000);
        logger.info("获取参考文献信息");
        try{
            http.simpleGet(html);
        }catch (NotFoundException e){
            // References endpoint missing — treat as "no references".
            html.setContent(null);
        }
        Map referencesJson = gson.fromJson(html.getContent(),Map.class);
        List<Map> referencesList;
        try{
            referencesList = (List<Map>)((List<Map>)((List<Map>)referencesJson.get("content")).get(0).get("$$")).get(1).get("$$");
        }catch (IndexOutOfBoundsException e){
            referencesList = new ArrayList<>();
        }catch (NullPointerException e){
            referencesList = new ArrayList<>();
        }
        String references = "";
        for(int i = 0 ;i < referencesList.size();i++){
            String ref = null;
            try{
                ref = (String) ((List<Map>)referencesList.get(i).get("$$")).get(1).get("#name");
            }catch (NullPointerException e){
                continue;
            }
            if("reference".equals(ref)){
                List<Map> oneReferenceList = (List<Map>)((List<Map>) ((List<Map>)((Map)referencesList.get(i)).get("$$")).get(1).get("$$")).get(0).get("$$");

                if(oneReferenceList.size() > 1){
                    // "title<:>author1 surname1,author2 surname2,;" per reference.
                    references = references + ((List<Map>)oneReferenceList.get(1).get("$$")).get(0).get("_") + "<:>";

                    for(int j = 0;j < ((List<Map>)oneReferenceList.get(0).get("$$")).size();j++){
                        List<Map> authorName = (List<Map>)((List<Map>)oneReferenceList.get(0).get("$$")).get(j).get("$$");
                        if(authorName!=null && authorName.size() >= 2){
                            references = references + authorName.get(0).get("_") + " " + authorName.get(1).get("_") + ",";
                        }
                    }
                    references = references + ";";
                }
            }
        }
        Map map = new HashMap();
        map.put("journal",journal);
        map.put("articleKeywords",articleKeywords);
        map.put("affName",affName);
        map.put("authorAffNo",authorAffNo);
        map.put("references",references);
        return map;
    }
    /**
     * Scrapes a Wiley article page for author-to-affiliation mapping,
     * affiliation names, keywords, journal, and references.
     *
     * @param dataNode parsed DOM of the detail page
     * @return map with keys: authorAffNo, affName, articleKeywords, journal, references
     */
    private static Map<String,String> getWileyDetailsInfo(DocumentFragment dataNode){
        NodeList authorNodes = DomTree.commonList(GoogleScholarXpath.WILEY_AuthorsName, dataNode);
        StringBuilder authorAffNo = new StringBuilder();
        StringBuilder affName = new StringBuilder();
        for(int idx = 0; idx < authorNodes.getLength(); idx++){
            String name = authorNodes.item(idx).getTextContent();
            NodeList affNodes = DomTree.commonList(
                    GoogleScholarXpath.WILEY_AffiliationName.replace("<n>", String.valueOf(idx + 1)), dataNode);
            if(affNodes.getLength() > 0){
                // "name:<n>;" paired with "<n>:affiliation;".
                authorAffNo.append(name).append(":").append(idx + 1).append(";");
                String affText = affNodes.item(0).getTextContent().replaceAll("\n", "");
                affName.append(idx + 1).append(":").append(affText).append(";");
            }else{
                // Author without an affiliation entry.
                authorAffNo.append(name).append(":").append(";");
            }
        }

        // Keywords, ';'-joined.
        NodeList keywordNodes = DomTree.commonList(GoogleScholarXpath.WILEY_Keywords, dataNode);
        StringBuilder keywords = new StringBuilder();
        for(int idx = 0; idx < keywordNodes.getLength(); idx++){
            keywords.append(keywordNodes.item(idx).getTextContent()).append(";");
        }

        // Journal title (all matches concatenated).
        StringBuilder journal = new StringBuilder();
        NodeList journalNodes = DomTree.commonList(GoogleScholarXpath.WILEY_journal, dataNode);
        for(int idx = 0; idx < journalNodes.getLength(); idx++){
            journal.append(journalNodes.item(idx).getTextContent());
        }

        // References: "title<:>firstAuthorBlock;" per reference (1-based <n>).
        StringBuilder references = new StringBuilder();
        NodeList refNameNodes = DomTree.commonList(GoogleScholarXpath.WILEY_referencesName, dataNode);
        for(int idx = 0; idx < refNameNodes.getLength(); idx++){
            references.append(refNameNodes.item(idx).getTextContent()).append("<:>");
            NodeList refAuthorNodes = DomTree.commonList(
                    GoogleScholarXpath.WILEY_referencesAuthor.replace("<n>", String.valueOf(idx + 1)), dataNode);
            if(refAuthorNodes.getLength() > 0){
                references.append(refAuthorNodes.item(0).getTextContent());
            }
            references.append(";");
        }

        Map<String,String> result = new HashMap<>();
        result.put("authorAffNo", authorAffNo.toString());
        result.put("affName", affName.toString());
        result.put("articleKeywords", keywords.toString());
        result.put("journal", journal.toString());
        result.put("references", references.toString());
        return result;
    }

    /**
     * Scrapes a Taylor &amp; Francis article page for journal, author
     * affiliations, and references. References live on a separate page whose
     * URL is taken from the detail page; T&amp;F is fetched without the proxy.
     *
     * Fix: the original configured the references request (URL, proxy, 15s
     * wait) but never called http.simpleGet(html), so html.getContent() still
     * held the previous page and references were parsed from stale data. The
     * request is now issued, with the same NotFoundException handling as the
     * Elsevier path, and a null-content guard so a failed fetch yields empty
     * references instead of a parse error.
     *
     * @param dataNode parsed DOM of the detail page
     * @return map with keys: journal, affName, authorAffNo, articleKeywords, references
     * @throws InterruptedException if the rate-limit sleep is interrupted
     */
    private static Map<String,String> getTaylorAndFrancisDetailsInfo(DocumentFragment dataNode) throws InterruptedException {
        // Journal title (all matches concatenated).
        String journal = "";
        NodeList journalNodeList = DomTree.commonList(GoogleScholarXpath.TaylorAndFrancis_journal,dataNode);
        for(int i = 0;i < journalNodeList.getLength();i++){
            journal = journal + journalNodeList.item(i).getTextContent();
        }
        // Author -> affiliation number, plus numbered affiliation names.
        String affName = "";
        String authorAffNo = "";
        NodeList authorNodeList = DomTree.commonList(GoogleScholarXpath.TaylorAndFrancis_AuthorsName,dataNode);

        for(int i = 0;i < authorNodeList.getLength();i++){
            authorAffNo = authorAffNo + authorNodeList.item(i).getTextContent() + ":" + (i + 1) + ";";
            NodeList affNodeList = DomTree.commonList(GoogleScholarXpath.TaylorAndFrancis_AffiliationName.replace("<n>",(i + 1) + ""),dataNode);
            String affNameTmp = (i + 1) + ":";
            for(int j = 0;j < affNodeList.getLength();j++){
                // Strip the affiliation text that T&F embeds inside the author name.
                authorAffNo = authorAffNo.replace(affNodeList.item(j).getTextContent(),"");
                affNameTmp = affNameTmp + affNodeList.item(j).getTextContent() + "+";
            }
            affName = affName + affNameTmp + ";";
        }
        String keywords = ""; // no keyword block is scraped for T&F; kept empty
        String references = "";
        NodeList referencesUrlNodeList = DomTree.commonList(GoogleScholarXpath.TaylorAndFrancis_referencesUrl,dataNode);
        if(referencesUrlNodeList.getLength() > 0){
            html.setOrignUrl("www.tandfonline.com" + referencesUrlNodeList.item(0).getTextContent());
            SimpleHttp http = new SimpleHttp();
            html.setProxy(null); // T&F is reachable without the proxy
            logger.info("Taylor & Francis站点的参考文献信息需要请求URL，等待15s后开始获取");
            Thread.sleep(15 * 1000);
            logger.info("获取参考文献信息");
            // Bug fix: actually perform the HTTP request (was missing).
            try{
                http.simpleGet(html);
            }catch (NotFoundException e){
                html.setContent(null);
            }
            if(html.getContent() != null){
                DocumentFragment referencesNode = DomTree.getNode(html.getContent(),"UTF-8");
                NodeList referencesNodeList = DomTree.commonList(GoogleScholarXpath.TaylorAndFrancis_referencesName,referencesNode);
                for(int i = 0;i < referencesNodeList.getLength();i++){
                    // "title<:>author1,author2,;" per reference (1-based <n>).
                    references = references + referencesNodeList.item(i).getTextContent() + "<:>";
                    NodeList referencesAuthorNodeList = DomTree.commonList(GoogleScholarXpath.TaylorAndFrancis_referencesAuthor.replace("<n>",i + 1 + ""),referencesNode);
                    for(int j = 0;j < referencesAuthorNodeList.getLength();j++){
                        references = references + referencesAuthorNodeList.item(j).getTextContent() + ",";
                    }
                    references = references + ";";
                }
            }
        }
        Map<String,String> map = new HashMap<>();
        map.put("journal",journal.trim());
        map.put("affName",affName);
        map.put("authorAffNo",authorAffNo);
        map.put("articleKeywords",keywords);
        map.put("references",references);
        return map;
    }


    /**
     * Extracts detail-page metadata for an IEEE Xplore article page: journal,
     * author affiliations, keywords and the reference list.
     *
     * The IEEE page embeds its metadata as a JavaScript assignment
     * ("global.document.metadata={...};") inside a script element; the JSON
     * payload is cut out of that script text and parsed. The reference list is
     * not embedded in the page and requires a second HTTP request to the
     * dwnldReferences endpoint, throttled with a 15s sleep like every other
     * request in this crawler.
     *
     * @param dataNode parsed DOM fragment of the IEEE article detail page
     * @return map with keys journal / affName / authorAffNo / articleKeywords /
     *         references (values may be empty strings), or {@code null} when
     *         the embedded metadata JSON cannot be located or parsed
     * @throws InterruptedException if the throttling sleep is interrupted
     */
    private static Map<String,String> getIEEEDetailsInfo(DocumentFragment dataNode) throws InterruptedException {
        String journal = "";
        StringBuilder affName = new StringBuilder();
        StringBuilder authorAffNo = new StringBuilder();
        StringBuilder keywords = new StringBuilder();
        StringBuilder references = new StringBuilder();

        // Locate the script node that carries the metadata and cut the object
        // literal out of the "global.document.metadata={...};" assignment.
        NodeList jsonNodeList = DomTree.commonList(GoogleScholarXpath.IEEE_json,dataNode);
        String json = "";
        for(int i = 0;i < jsonNodeList.getLength();i++){
            String content = jsonNodeList.item(i).getTextContent();
            if(content.contains("global.document.metadata")){
                // split("};") drops the closing brace, so it is re-appended
                json = content.split("global.document.metadata=")[1].split("};")[0] + "}";
                break;
            }
        }
        JSONObject a;
        try{
            a = JSONObject.fromObject(json);
        }catch (JSONException e){
            // Metadata missing or page layout changed — caller treats null as "skip".
            return null;
        }
        Gson gson = new Gson();
        Map detailsInfoMap = gson.fromJson(a.toString(),Map.class);

        // Author -> affiliation mapping; the 1-based index links the two fields
        // ("name:1;" in authorAffNo pairs with "1:affiliation;" in affName).
        List<Map> authorsList = (List<Map>) detailsInfoMap.get("authors");
        if(authorsList != null){
            for(int i = 0;i < authorsList.size();i++){
                authorAffNo.append(authorsList.get(i).get("name")).append(":").append(i + 1).append(";");
                List<String> affiliations = (List<String>) authorsList.get(i).get("affiliation");
                // Guard: some author entries carry no affiliation list (was an NPE).
                if(affiliations != null && !affiliations.isEmpty()){
                    affName.append(i + 1).append(":").append(affiliations.get(0).replace("\n","")).append(";");
                }
            }
        }

        // Journal title.
        List<String> journalList = ((List<String>) detailsInfoMap.get("displayPublicationTitle"));
        if(journalList != null && !journalList.isEmpty()){
            journal = journalList.get(0);
        }

        // Keywords, serialized per group as "type:kwd1,kwd2,;".
        List<Map> keywordsList = (List<Map>) detailsInfoMap.get("keywords");
        if(keywordsList != null){
            for(int i = 0;i < keywordsList.size();i++){
                Map keywordsMap = keywordsList.get(i);
                if(keywordsMap.get("type") != null){
                    keywords.append(keywordsMap.get("type")).append(":");
                }else{
                    keywords.append("noType:");
                }
                List<String> kwdList = (List<String>)keywordsMap.get("kwd");
                // Guard: "kwd" may be absent in malformed entries (was an NPE).
                if(kwdList != null){
                    for(int j = 0;j < kwdList.size();j++){
                        keywords.append(kwdList.get(j)).append(",");
                    }
                }
                keywords.append(";");
            }
        }

        // References require a second request to the download endpoint.
        String articleNumber = (String) detailsInfoMap.get("articleNumber");
        if(articleNumber != null){
            html.setOrignUrl("ieeexplore.ieee.org/xpl/dwnldReferences?arnumber=" + articleNumber);
            logger.info("IEEE站点获取参考文献信息需要请求URL，等待15s后开始获取");
            Thread.sleep(15 * 1000);
            logger.info("获取参考文献信息");

            DocumentFragment referencesNode = DomTree.getNode(html.getContent(),"UTF-8");
            NodeList referencesNodeList = DomTree.commonList(GoogleScholarXpath.IEEE_references,referencesNode);

            if(referencesNodeList.getLength() > 0){
                // The plain-text list is delimited by "\t\t\t\n\t\t<n>."; each
                // entry has its title in double quotes and the author run before it.
                String referencesTmp = referencesNodeList.item(0).getTextContent();
                int i = 1;
                while (referencesTmp.contains("\t\t\t\n\t\t" + i + ".")){
                    String marker = "\t\t\t\n\t\t" + i + ".";
                    String nextMarker = "\t\t\t\n\t\t" + (i + 1) + ".";
                    String oneReference;
                    if(referencesTmp.contains(nextMarker)){
                        oneReference = referencesTmp.substring(referencesTmp.indexOf(marker),referencesTmp.indexOf(nextMarker));
                    }else {
                        oneReference = referencesTmp.substring(referencesTmp.indexOf(marker));
                    }
                    String [] referenceArray = oneReference.split("\"");
                    if(referenceArray.length > 1){
                        references.append(referenceArray[1].replace("\n\t\t\n\t\t","").replace("&amp;","")).append("<:>");
                        // NOTE: replaceFirst takes a regex; the trailing "." matches
                        // the literal dot after the index here, same as the dot char.
                        String [] authors = referenceArray[0].replaceFirst(marker,"").split("\n\t\t\n\t\t");
                        for(String author:authors){
                            if(!"".equals(author)){
                                references.append(author.replace("&amp;","")).append(",");
                            }
                        }
                        references.append(";");
                    }
                    i++;
                }
            }
        }

        Map<String,String> map = new HashMap<>();
        map.put("journal",journal);
        map.put("affName",affName.toString());
        map.put("authorAffNo",authorAffNo.toString());
        map.put("articleKeywords",keywords.toString());
        map.put("references",references.toString());
        return map;
    }

    /**
     * Walks the crawled paper list and enriches each entry with detail-page
     * metadata (journal, affiliations, keywords) from its source site.
     *
     * Entries already persisted (matched via MD5 of url + search keyword against
     * {@code allMD5}) and entries hosted on unsupported sites are skipped.
     * Every remaining fetch is throttled with a 15-second pause.
     *
     * @param list          crawled paper records to enrich in place
     * @param allMD5        concatenated MD5 digests of already-stored records
     * @param searchKeyword keyword used in the original search (part of the dedup key)
     * @throws InterruptedException if a throttling sleep is interrupted
     */
    private static void extractTitleUrlList(List<GooglePaperData> list,String allMD5,String searchKeyword) throws InterruptedException {
        logger.info("获取文章详细信息");

        for(int idx = 0; idx < list.size(); idx++) {
            logger.info("开始获取第"+ (idx + 1) +"篇文章的详细信息");
            GooglePaperData paper = list.get(idx);
            String url = paper.getUrl();

            // Deduplicate: skip records that already exist in the database.
            if(allMD5.contains(MD5Util.MD5(url + searchKeyword))){
                continue;
            }
            // Unsupported site — nothing to extract.
            if(checkSite(url) == 0){
                continue;
            }

            Map details = null;
            try{
                // Dispatch to the per-site parser for affiliations / keywords /
                // abstract / journal information.
                DocumentFragment node = getDetailsNode(url);
                if(url.contains(Springer)){
                    details = getSpringerDetailsInfo(node);
                }else if(url.contains(Elsevier)){
                    details = getElsevierDetailsInfo(node);
                }else if(url.contains(WILEY)){
                    details = getWileyDetailsInfo(node);
                }else if(url.contains(TaylorAndFrancis)){
                    details = getTaylorAndFrancisDetailsInfo(node);
                }else if(url.contains(IEEE)){
                    details = getIEEEDetailsInfo(node);
                }
                // Parser gave up on this page — wait out the throttle and move on.
                if(details == null){
                    Thread.sleep(15 * 1000);
                    continue;
                }
            }catch (NotFoundException e){
                e.printStackTrace();
                logger.info("第"+ (idx + 1) +"篇文章的详情页不可达，15后采集下一篇");
                Thread.sleep(15 * 1000);
                continue;
            }

            paper.setArticleKeywords((String) details.get("articleKeywords"));
            paper.setAuthorAffNo((String) details.get("authorAffNo"));
            paper.setAffName((String) details.get("affName"));
            paper.setJournal((String) details.get("journal"));
            logger.info("信息获取成功，15s后获取第" + (idx + 2) + "篇文章的详细信息");
            // Throttle: 15 seconds between successful detail fetches.
            Thread.sleep(1000*15);
        }
    }
}
