package com.lsh.streamgd.sparql;

import com.hp.hpl.jena.rdf.model.Model;

import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.util.FileManager;
import com.lsh.streamgd.utils.StringUtil;
import de.fuberlin.wiwiss.d2rq.jena.ModelD2RQ;
import virtuoso.jena.driver.VirtModel;

import java.io.InputStream;

/**
 * Uploads RDF data into a named Virtuoso graph.
 *
 * <p>Each {@code convert2RDF}/{@code trace2RDF} call opens a fresh
 * {@link VirtModel} connection against the configured graph, performs the
 * operation, and closes the connection in a {@code finally} block. Errors are
 * reported to stdout and swallowed (best-effort semantics, preserved from the
 * original design).
 */
public class Upload {

    // Connection settings are fixed at construction time; only the
    // per-operation dataSet handle is mutable.
    private final String driver;
    private final String graph;
    private final String username;
    private final String password;
    private VirtModel dataSet;

    /**
     * @param driver   Virtuoso JDBC connection URL (e.g. {@code jdbc:virtuoso://...})
     * @param graph    target named graph URI
     * @param username database user
     * @param password database password
     */
    public Upload(String driver, String graph, String username, String password) {
        this.driver = driver;
        this.graph = graph;
        this.username = username;
        this.password = password;
    }

    /**
     * Converts a relational database to RDF via a D2RQ mapping and uploads the
     * resulting triples to the configured graph.
     *
     * @param mappingFile D2RQ mapping file (N3 syntax)
     * @param baseURI     base URI for resources generated by the mapping
     */
    public void convert2RDF(String mappingFile, String baseURI) {
        dataSet = VirtModel.openDatabaseModel(graph, driver, username, password);

        try {
            Model m = new ModelD2RQ(mappingFile, "N3", baseURI);
            try {
                dataSet.add(m);
            } finally {
                // ModelD2RQ holds JDBC connections to the mapped database;
                // close it even if the upload fails.
                m.close();
            }
        } catch (Exception e) {
            System.out.println("ERROR Convert Failed.");
            e.printStackTrace();
        } finally {
            dataSet.close();
        }
    }

    /**
     * Uploads an in-memory Jena model to the configured graph.
     *
     * @param m model whose statements are added to the graph; not closed here,
     *          ownership stays with the caller
     */
    public void convert2RDF(Model m) {
        dataSet = VirtModel.openDatabaseModel(graph, driver, username, password);

        try {
            dataSet.add(m);
        } catch (Exception e) {
            System.out.println("ERROR Convert Failed.");
            e.printStackTrace();
        } finally {
            dataSet.close();
        }
    }

    /**
     * Reads an ontology file (Turtle syntax, despite the "owl" parameter name)
     * and uploads its triples to the configured graph.
     *
     * @param owlFile path or URL of the file to read (resolved by Jena's
     *                {@link FileManager})
     */
    public void convert2RDF(String owlFile) {
        dataSet = VirtModel.openDatabaseModel(graph, driver, username, password);

        try {
            Model m = ModelFactory.createDefaultModel();
            // FileManager.open returns null for a missing file; fail with a
            // clear message instead of an NPE inside m.read(...).
            try (InputStream in = FileManager.get().open(owlFile)) {
                if (in == null) {
                    throw new IllegalArgumentException("File not found: " + owlFile);
                }
                m.read(in, "", "TTL");
            }
            dataSet.add(m);
            m.close();
        } catch (Exception e) {
            System.out.println("ERROR Convert Failed.");
            e.printStackTrace();
        } finally {
            dataSet.close();
        }
    }

    /**
     * Removes all values of property {@code p} from every instance of class
     * {@code c} in the configured graph (SPARQL DELETE/WHERE).
     *
     * @param c class URI whose instances are affected
     * @param p property URI to strip from those instances
     */
    public void trace2RDF(String c, String p) {
        dataSet = VirtModel.openDatabaseModel(graph, driver, username, password);

        try {
            String query = StringUtil.getNsPrefixString(dataSet.getNsPrefixMap()) +
                    "DELETE {?s <" + p + "> ?o .} " +
                    "WHERE {?s a <" + c + "> ; <" + p + "> ?o .}";

            SparqlExecution.update(dataSet, query);
        } catch (Exception e) {
            // Was "ERROR Convert Failed." — misleading for a delete operation.
            System.out.println("ERROR Trace Failed.");
            e.printStackTrace();
        } finally {
            dataSet.close();
        }
    }

//	public static void uploadRDF(String mappingFile, String baseURI){
//		//Model m = new ModelD2RQ("file:d:/d2rq-0.8.1/mapping.n3", "N3", "http://www.seminfo.com.cn/abc/");
//		Model m = new ModelD2RQ(mappingFile, "N3", baseURI);
//
//		long totalSize = m.size();
//		int fileNum = 1; int eachSize = 300000;
//		int count = 0;
//		int thred = fileNum*eachSize;
//		StmtIterator itor = m.listStatements();
//		//split
//		Model eachModel = ModelFactory.createDefaultModel();
//		while(itor.hasNext()){
//			if(count==thred){
//				//upload or generate the file;
//				FileLocker.getInstance().writeModel(eachModel, "splittedModel_"+fileNum+".N3");
//				//or upload to remote place
//
//				eachModel.removeAll();
//				fileNum++;
//				thred=fileNum*eachSize;
//			}
//
//			Statement stmt = itor.next();
//			eachModel.add(stmt);
//
//			count++;
//		}
//		//upload or generate the file;
//		if(!eachModel.isEmpty()) FileLocker.getInstance().writeModel(eachModel, "splittedModel_"+fileNum+".N3");
//
//        m.write(System.out, "N3");
//        m.close();
//	}
}
