package com.family.relation.utils;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.util.*;

import org.springframework.stereotype.Component;

import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.query.ResultSetFormatter;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.tdb.TDB;
import com.hp.hpl.jena.update.UpdateExecutionFactory;
import com.hp.hpl.jena.update.UpdateFactory;
import com.hp.hpl.jena.update.UpdateProcessor;
import com.hp.hpl.jena.vocabulary.RDFS;

@Component("virtuoso")
public class PublicVirtuoso {

	// SPARQL endpoint hosting the Baidu-Baike celebrity triples.
	public static String sparqlServer = "http://45.78.35.217:8890/sparql";
	// Shanghai Library genealogy ("jiapu") SPARQL endpoint.
	public static String jiapuSparql = "http://data.library.sh.cn:8890/sparql";
	// Prefix header prepended to the genealogy queries below.
	public static String prefix = "PREFIX shl: <http://www.library.sh.cn/ontology/> " +
			"PREFIX bf: <http://bibframe.org/vocab/> " +
			"PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> "+
			"PREFIX xsd: <http://www.w3.org/2001/XMLSchema#> "+
			"PREFIX dc: <http://purl.org/dc/elements/1.1/> ";

	/**
	 * Smoke test: inserts one test triple into the custom graph.
	 *
	 * Fixed: the original passed the INSERT DATA string to
	 * QueryExecutionFactory/execSelect(), which parses it as a SELECT query
	 * and fails — SPARQL Update must go through the update API.
	 */
	public static void main(String[] args) {
		String updateStr = " insert data "
				+ "{ graph <http://www.semlink.cn/graph/custom>"
					+ "{"
					+ "<http://test/1> <http://test/2> <http://test3>."
					+ "}"
				+ "}";
		UpdateProcessor proc = UpdateExecutionFactory.createRemote(
				UpdateFactory.create(updateStr), sparqlServer);
		proc.execute();
	}

	/**
	 * Iterates every celebrity name in the Baidu-Baike graph (matching the
	 * 姓名 / 中文名 / 别名 properties), prints each one, and maps it onto the
	 * genealogy database via {@link #MapJiapu(String)}.
	 */
	public void mapTest(){
		QueryExecution qExec = null;
		try {
			String queryStr = " select distinct * where "
					+ "{ "
					+ "{?s <http://www.ia.cas.cn/baike_baidu/resource/姓名> ?name} union "
					+ "{?s <http://www.ia.cas.cn/baike_baidu/resource/中文名> ?name} union "
					+ "{?s <http://www.ia.cas.cn/baike_baidu/resource/别名> ?name} "
					+ "}";
			qExec = QueryExecutionFactory.sparqlService(sparqlServer, queryStr);
			ResultSet rs = qExec.execSelect();

			// iterate each celebrity from "baidu baike"
			while (rs.hasNext()) {
				QuerySolution soln = rs.nextSolution();
				Resource celebrityRes = soln.getResource("s");
				String name = soln.getLiteral("name").getString();
				System.out.println("<" + celebrityRes.getURI() + ">," + name);

				// map to jiapu database (call our own method; the original
				// allocated a fresh PublicVirtuoso for no reason)
				MapJiapu(name);
			}
		} finally {
			if (qExec != null)
				qExec.close();
		}
	}

	/**
	 * Runs an arbitrary SELECT query against the genealogy endpoint and
	 * pretty-prints the result table to stdout.
	 *
	 * @param query full SPARQL SELECT query text
	 */
	public void jiapuQuery(String query) {
		QueryExecution qExec = null;
		try {
			qExec = QueryExecutionFactory.sparqlService(jiapuSparql, query);
			ResultSet rs = qExec.execSelect();
			ResultSetFormatter.out(System.out, rs);
		} finally {
			if (qExec != null)
				qExec.close();
		}
	}

	/**
	 * Orders keys by their score in {@code base}, highest first.
	 *
	 * Note: this comparator deliberately never returns 0 (returning 0 would
	 * merge equal-scored keys in a TreeMap), so it is inconsistent with
	 * equals — it is only suitable for the value-sorted view built in
	 * {@link #Map2JP}. Keys absent from {@code base} would NPE; callers
	 * only insert keys that are already in the backing map.
	 */
	class ValueComparator implements Comparator<String> {
		Map<String, Integer> base;

		public ValueComparator(Map<String, Integer> base) {
			this.base = base;
		}

		public int compare(String a, String b) {
			if (base.get(a) >= base.get(b)) {
				return -1;
			} else {
				return 1;
			} // returning 0 would merge keys
		}
	}

	/**
	 * Recommends genealogy works for a person, ranked by a heuristic
	 * confidence score:
	 *   1. works of same-name members, scored by how much of
	 *      province/city/county the title mentions (100 / 80 / 20, else 5);
	 *   2. works linked to the supplied ancestor names: 80;
	 *   3. surname + locality fallbacks: 60 / 20 / 10.
	 * An entry keeps the score of the earliest (highest-priority) rule that
	 * produced it.
	 *
	 * NOTE(review): all arguments are concatenated directly into SPARQL, so
	 * this is vulnerable to SPARQL injection for untrusted input — sanitize
	 * upstream or switch to ParameterizedSparqlString.
	 *
	 * @param name    person name (Chinese); first character is taken as the surname
	 * @param prov    province used for title matching
	 * @param city    city used for title matching
	 * @param county  county used for title matching
	 * @param persons optional ';'-separated ancestor names; may be blank
	 * @return list of "workURI--title [score%]" strings, best match first
	 */
	public List<String> Map2JP(String name, String prov, String city, String county, String persons) {
		List<String> mapped = new ArrayList<String>();

		HashMap<String, Integer> map = new HashMap<String, Integer>();
		ValueComparator bvc = new ValueComparator(map);
		TreeMap<String, Integer> sorted_map = new TreeMap<String, Integer>(bvc);

		// 1) all genealogies of members whose label equals the person's name
		String query = prefix + "select distinct ?work (str(?t) as ?title) " +
				"where {" +
				"   ?s bf:label ?name ; " +
				"      shl:relatedWork ?work ." +
				"   FILTER (?name = '" + name + "'@chs)" +
				"   {select ?work ?t where {" +
				"   graph <http://gen.library.sh.cn/graph/work> {" +
				"       ?work dc:title ?t . " +
				"       FILTER (lang(?t) = 'chs')" +
				"   }" +
				"   }}" +
				"}";

		QueryExecution qExec = QueryExecutionFactory.sparqlService(jiapuSparql, query);
		try {
			ResultSet rs = qExec.execSelect();
			while (rs.hasNext()) {
				QuerySolution soln = rs.nextSolution();
				String work = soln.get("work").toString();
				String title = soln.get("title").toString();

				// the more specific the locality match, the higher the score
				int rate = 5;
				if (title.contains(prov)) {
					if (title.contains(city)) {
						rate = title.contains(county) ? 100 : 80;
					} else {
						rate = 20;
					}
				}
				map.put(work + "--" + title, rate);
			}
		} finally {
			qExec.close(); // original leaked every execution in this method
		}

		// 2) genealogies recommended through known ancestor celebrities
		if (!StringUtil.isBlank(persons)) {
			String[] cells = persons.split(";");

			StringBuilder filter = new StringBuilder();
			if (cells.length > 0) {
				filter.append("FILTER (");
				for (int i = 0; i < cells.length; i++) {
					filter.append("(?name = '").append(cells[i].trim()).append("'@chs)");
					if (i < cells.length - 1) {
						filter.append("||");
					}
				}
				filter.append(")");
			}

			String q = prefix + "select distinct ?work (str(?t) as ?title) " +
					"where {" +
					"   ?s bf:label ?name ; " +
					"      shl:relatedWork ?work ." + filter +
					"   {select ?work ?t where {" +
					"   graph <http://gen.library.sh.cn/graph/work> {" +
					"       ?work dc:title ?t . " +
					"       FILTER (lang(?t) = 'chs')" +
					"   }" +
					"   }}" +
					"} limit 5";

			QueryExecution qe = QueryExecutionFactory.sparqlService(jiapuSparql, q);
			try {
				ResultSet rs = qe.execSelect();
				while (rs.hasNext()) {
					QuerySolution soln = rs.nextSolution();
					String key = soln.get("work").toString() + "--" + soln.get("title").toString();
					// keep an existing (name-based) score in preference
					if (!map.containsKey(key)) {
						map.put(key, 80);
					}
				}
			} finally {
				qe.close();
			}
		}

		// 3) fallback: infer from surname + locality (excluding the person's own name)
		String xing = name.substring(0, 1); // surname = first character
		query = prefix + "select ?family where {" +
				"graph <http://gen.library.sh.cn/graph/baseinfo> {" +
				"   ?family a shl:FamilyName ; " +
				"           bf:label ?label . " +
				"   FILTER (lang(?label) = 'chs')" +
				"   FILTER (?label = '" + xing + "'@chs)" +
				"}" +
				"}";

		qExec = QueryExecutionFactory.sparqlService(jiapuSparql, query);
		try {
			ResultSet rs = qExec.execSelect();
			while (rs.hasNext()) {
				QuerySolution soln = rs.nextSolution();
				String family = soln.get("family").toString();

				// title mentions province+city+county -> 60
				mergeFamilyWorks(map, family, name, prov + city + county, null, 60);
				// title mentions province+city but not the full locality -> 20
				mergeFamilyWorks(map, family, name, prov + city, prov + city + county, 20);
				// title mentions only the province, not the full locality -> 10
				mergeFamilyWorks(map, family, name, prov, prov + city + county, 10);
			}
		} finally {
			qExec.close();
		}

		// sort by score descending and format for display
		sorted_map.putAll(map);
		for (Map.Entry<String, Integer> entry : sorted_map.entrySet()) {
			mapped.add(entry.getKey() + " [" + entry.getValue() + "%]");
		}

		return mapped;
	}

	/**
	 * Runs one surname/locality fallback query for a family resource and
	 * merges up to 5 works into {@code map} at {@code rate}, never
	 * overwriting an already-scored (higher-priority) entry.
	 *
	 * @param contains string the work title must contain
	 * @param excludes string the title must NOT contain; null to skip the check
	 */
	private void mergeFamilyWorks(Map<String, Integer> map, String family, String name,
			String contains, String excludes, int rate) {
		String q = prefix + "select distinct ?work (str(?t) as ?title) " +
				"where {" +
				"   ?s bf:label ?name ; " +
				"      shl:relatedWork ?work . " +
				"   FILTER (?name != '" + name + "'@chs)" +
				"   FILTER (lang(?name) = 'chs')" +
				"   {select ?work ?t where {" +
				"   graph <http://gen.library.sh.cn/graph/work> {" +
				"       ?work dc:title ?t ; bf:subject <" + family + "> . " +
				"       FILTER (lang(?t) = 'chs')" +
				"       FILTER contains(?t, '" + contains + "')" +
				(excludes == null ? "" : "       FILTER (!contains(?t, '" + excludes + "'))") +
				"   }}}" +
				"} limit 5";

		QueryExecution qe = QueryExecutionFactory.sparqlService(jiapuSparql, q);
		try {
			ResultSet r = qe.execSelect();
			while (r.hasNext()) {
				QuerySolution qs = r.nextSolution();
				String key = qs.get("work").toString() + "--" + qs.get("title").toString();
				if (!map.containsKey(key)) {
					map.put(key, rate);
				}
			}
		} finally {
			qe.close();
		}
	}

	/**
	 * Maps a celebrity name onto genealogy works: finds every member whose
	 * bf:label equals {@code name} and resolves the Chinese title of each
	 * related work.
	 *
	 * @param name person name; null returns null
	 * @return list of "workURI--title" strings (possibly empty)
	 */
	public List<String> MapJiapu(String name){
		if (name == null) return null;
		List<String> mapped = new ArrayList<String>();
		QueryExecution qExec = null;
		try {
			String keyword = name;

			String queryStr = " select distinct * where {?s <http://bibframe.org/vocab/label> ?word ; <http://www.library.sh.cn/ontology/relatedWork> ?work. FILTER (lang(?word) = 'chs') FILTER (STR(?word) = '" + keyword + "')}";
			qExec = QueryExecutionFactory.sparqlService(jiapuSparql, queryStr);
			ResultSet rs = qExec.execSelect();

			while (rs.hasNext()) {
				QuerySolution soln = rs.nextSolution();
				String objName = soln.get("work").toString();

				// resolve the work's Chinese title with a dedicated execution;
				// the original reassigned qExec here, leaking the outer one
				String query = "select (str(?w) as ?work) where {<" + objName + "> <http://purl.org/dc/elements/1.1/title> ?w . FILTER (lang(?w) = 'chs')}";
				QueryExecution inner = QueryExecutionFactory.sparqlService(jiapuSparql, query);
				try {
					ResultSet r = inner.execSelect();
					while (r.hasNext()) {
						QuerySolution soln1 = r.nextSolution();
						String work = soln1.get("work").toString();
						mapped.add(objName + "--" + work);
					}
				} finally {
					inner.close();
				}
			}
			return mapped;
		} finally {
			if (qExec != null)
				qExec.close();
		}
	}

	/**
	 * Builds a description of a person from the Baike graph: for the
	 * resource carrying {@code name} as a literal value, every
	 * literal-valued property is rendered as "localName 是 value;".
	 *
	 * @param name person name matched as an xsd:string literal
	 * @return concatenated "property 是 value;" fragments (possibly empty)
	 */
	public String MapBaike(String name){
		StringBuffer sb = new StringBuffer();
		QueryExecution qExec = null;
		try {
			String queryStr = prefix + " select distinct ?s ?pro ?obj where {?s ?p \"" + name
					+ "\"^^xsd:string. ?s ?pro ?obj}";
			qExec = QueryExecutionFactory.sparqlService(sparqlServer, queryStr);
			ResultSet rs = qExec.execSelect();

			while (rs.hasNext()) {
				QuerySolution soln = rs.nextSolution();
				Resource pro = soln.getResource("pro");
				RDFNode node = soln.get("obj");
				if (node.isLiteral()) {
					sb.append(pro.getLocalName() + " 是 " + node.asLiteral().getString() + ";");
				}
			}
			return sb.toString();
		} finally {
			if (qExec != null)
				qExec.close();
		}
	}

	/**
	 * Returns the rdfs:label of {@code r} if one is attached in its model,
	 * otherwise the resource's local name. Null-safe.
	 */
	private String getLabelorLocalName(Resource r){
		if (r == null) return null;
		String labelOrLocal;
		Statement stmt = r.getProperty(RDFS.label);
		if (stmt == null) {
			labelOrLocal = r.getLocalName();
		} else {
			labelOrLocal = stmt.getObject().asLiteral().getString();
		}
		return labelOrLocal;
	}

	/**
	 * Resolves the rdfs:label of a resource URI by querying the genealogy
	 * endpoint (across all graphs); falls back to the URI's local name when
	 * no label is found.
	 *
	 * @param resStr resource URI; null returns null
	 * @return the label, or the local name, or "" if the query finds nothing usable
	 */
	public static String getLabelorLocalName(String resStr){
		if (resStr == null) return null;
		String labelOrLocal = "";

		QueryExecution qExec = null;
		try {
			String query = "PREFIX rdfs:<http://www.w3.org/2000/01/rdf-schema#> " +
					" SELECT ?label WHERE {<" + resStr.trim() + "> rdfs:label ?label}";
			qExec = QueryExecutionFactory.sparqlService(jiapuSparql, query);
			// query the union of all named graphs, not just the default graph
			qExec.getContext().set(TDB.symUnionDefaultGraph, true);
			ResultSet rs = qExec.execSelect();

			if (rs.hasNext()) {
				QuerySolution sln = rs.next();
				labelOrLocal = sln.get("label").asLiteral().getString();
			} else {
				labelOrLocal = ModelFactory.createDefaultModel().getResource(resStr).getLocalName();
			}
		} finally {
			if (qExec != null) {
				qExec.close();
			}
		}
		return labelOrLocal;
	}

	/**
	 * Runs a SELECT query and returns the result as a list of row maps
	 * suitable for HTML rendering. The first map is the "schema" row,
	 * mapping column position ("1", "2", ...) to variable name; subsequent
	 * maps carry variable-name -> string-value pairs, capped at ~2000 rows.
	 *
	 * @param query full SPARQL SELECT query text
	 * @return schema map followed by one map per non-empty result row
	 */
	public List<Map<String,String>> htmlQuery(String query) {
		List<Map<String,String>> list = new ArrayList<Map<String,String>>();
		QueryExecution qExec = null;
		try {
			qExec = QueryExecutionFactory.sparqlService(jiapuSparql, query);
			ResultSet rs = qExec.execSelect();

			// header row: position -> variable name
			Map<String,String> schemas = new HashMap<String,String>();
			int countStr = 1;
			List<String> varNames = rs.getResultVars();
			for (int i = 0; i < varNames.size(); i++) {
				schemas.put(String.valueOf(countStr++), varNames.get(i));
			}
			list.add(schemas);

			int count = 1;	 // limit the query amount
			while (rs.hasNext()) {
				Map<String,String> tempMap = new HashMap<String,String>();

				QuerySolution soln = rs.next();
				Iterator<String> varItor = soln.varNames();
				while (varItor.hasNext()) {
					String varName = varItor.next();
					// check the node BEFORE dereferencing it; the original
					// called toString() first and its null check was dead
					RDFNode node = soln.get(varName);
					if (node == null) continue;
					tempMap.put(varName, node.toString());
				}
				if (!tempMap.isEmpty()) list.add(tempMap);
				if (count++ > 2000) break;	// set the up limit = 2000
			}
			return list;
		} finally {
			if (qExec != null)
				qExec.close();
		}
	}

	/**
	 * Runs a SELECT query and writes the result as a text table to {@code output}.
	 */
	public void commonQuery(OutputStream output, String query) {
		QueryExecution qExec = null;
		try {
			qExec = QueryExecutionFactory.sparqlService(jiapuSparql, query);
			ResultSet rs = qExec.execSelect();
			ResultSetFormatter.out(output, rs);
		} finally {
			if (qExec != null)
				qExec.close();
		}
	}

	/**
	 * Runs a SELECT query and writes the result as CSV to {@code output}.
	 */
	public void commonQueryCSV(OutputStream output, String query) {
		QueryExecution qExec = null;
		try {
			qExec = QueryExecutionFactory.sparqlService(jiapuSparql, query);
			ResultSet rs = qExec.execSelect();
			ResultSetFormatter.outputAsCSV(output, rs);
		} finally {
			if (qExec != null)
				qExec.close();
		}
	}

	/**
	 * Runs a SELECT query (across all named graphs) and writes the result
	 * as SPARQL-results JSON to {@code output}.
	 */
	public void commonQueryJSON(OutputStream output, String query) {
		QueryExecution qExec = null;
		try {
			qExec = QueryExecutionFactory.sparqlService(jiapuSparql, query);
			qExec.getContext().set(TDB.symUnionDefaultGraph, true);
			ResultSet rs = qExec.execSelect();
			ResultSetFormatter.outputAsJSON(output, rs);
		} finally {
			if (qExec != null)
				qExec.close();
		}
	}

	/**
	 * Runs a SELECT query and writes the result as SPARQL-results XML to {@code output}.
	 */
	public void commonQueryXML(OutputStream output, String query) {
		QueryExecution qExec = null;
		try {
			qExec = QueryExecutionFactory.sparqlService(jiapuSparql, query);
			ResultSet rs = qExec.execSelect();
			ResultSetFormatter.outputAsXML(output, rs);
		} finally {
			if (qExec != null)
				qExec.close();
		}
	}

	/**
	 * Looks up KB resource local names whose rdf:type literal equals the
	 * keyword (escaped for embedding in the query string).
	 *
	 * @param keywork keyword to match (sic — name kept for compatibility)
	 * @return local names of matching subjects (possibly empty)
	 */
	public static List<String> mapDirectly(String keywork) {
		List<String> resKBIds = new ArrayList<String>();
		QueryExecution qExec = null;
		try {
			String prefix = " PREFIX owl:<http://www.w3.org/2002/07/owl#> "
					+ " PREFIX baidubaike:<http://www.ia.cas.cn/baike_baidu/resource/> "
					+ " PREFIX rdf:<http://www.w3.org/1999/02/22-rdf-syntax-ns#> "
					+ " PREFIX rdfs:<http://www.w3.org/2000/01/rdf-schema#> "
					+ "PREFIX xsd:<http://www.w3.org/2001/XMLSchema#> ";

			// escape backslashes and quotes so the keyword survives embedding
			keywork = keywork.replace("\\", "\\\\");
			keywork = keywork.replace("\"", "\\\"");

			String queryStr = " select * {?s rdf:type \"" + keywork
					+ "\"^^xsd:string}";
			qExec = QueryExecutionFactory.sparqlService(jiapuSparql, queryStr);
			ResultSet rs = qExec.execSelect();

			while (rs.hasNext()) {
				String temp = rs.next().getResource("s").getLocalName();
				resKBIds.add(temp);
			}
		} finally {
			if (qExec != null)
				qExec.close();
		}
		return resKBIds;
	}

	/**
	 * Appends {@code content} plus a newline to {@code fileName} as UTF-8,
	 * creating the file if needed. Errors are logged, not propagated
	 * (matching the original best-effort contract).
	 *
	 * Fixed: the writer is now closed in a finally block so a failed
	 * write() no longer leaks the file handle.
	 */
	public static void writeFile(String content, String fileName) {
		BufferedWriter writer = null;
		try {
			File f = new File(fileName);
			if (!f.exists()) {
				f.createNewFile();
			}
			writer = new BufferedWriter(
					new OutputStreamWriter(new FileOutputStream(f, true), "UTF-8"));
			writer.write(content);
			writer.newLine();
		} catch (Exception e) {
			System.out.println(" write error ");
			e.printStackTrace();
		} finally {
			if (writer != null) {
				try {
					writer.close();
				} catch (Exception ignored) {
					// best-effort close; nothing useful to do here
				}
			}
		}
	}

}
