package xw4g08;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.zip.GZIPInputStream;

import org.openrdf.model.Statement;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.http.HTTPRepository;
import org.openrdf.repository.util.RDFInserter;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFHandler;
import org.openrdf.rio.RDFHandlerException;
import org.openrdf.rio.RDFParseException;
import org.openrdf.rio.RDFParser;
import org.openrdf.rio.Rio;

public class DataLoader {

	// Input serialization of the data files being uploaded.
	static RDFFormat informat = RDFFormat.TURTLE;
	// When true, uploads resume from the last committed triple count
	// recorded by LineRecord instead of clearing the repository first.
	static boolean append = true;

	/**
	 * Entry point: for every subdirectory of {@code Configuration.OUT_DIR},
	 * uploads its data files to the repositories listed in
	 * {@code Configuration.REP_URLs}.
	 */
	public static void main(String[] args) {

		File datadir = Configuration.OUT_DIR;
		// add data to remote repositories
		Configuration.init();
		List<String> urls = Configuration.REP_URLs;
		File[] dirs = datadir.listFiles();
		// listFiles() returns null when the path is missing or not a directory;
		// the original code would NPE here.
		if (dirs == null) {
			System.out.println("No data directories found in " + datadir);
			return;
		}
		for (File sourceDir : dirs) {
			upload(sourceDir, urls);
		}

		System.out.println("Finished.");
	}

	/**
	 * Uploads the i-th (sorted) data file in {@code sourceDir} to the i-th
	 * repository URL, one worker per upload, and blocks until all workers
	 * finish.
	 *
	 * @param sourceDir directory containing one data file per repository URL
	 * @param urls      target repository URLs, paired with files by index
	 */
	public static void upload(File sourceDir, List<String> urls) {
		File[] datafiles = sourceDir.listFiles();
		// Guard against a missing directory and against having fewer files
		// than URLs (the original indexed datafiles[i] unchecked).
		if (datafiles == null || datafiles.length < urls.size()) {
			System.out.println("Skipping " + sourceDir + ": expected at least "
					+ urls.size() + " data files.");
			return;
		}
		Arrays.sort(datafiles);
		System.out.println("Processing files from " + sourceDir);
		ExecutorService es = Executors.newCachedThreadPool();
		for (int i = 0; i < urls.size(); i++) {
			es.submit(new UploadThread(urls.get(i), datafiles[i]));
		}

		es.shutdown();
		while (!es.isTerminated()) {
			try {
				es.awaitTermination(3600, TimeUnit.SECONDS);
			} catch (InterruptedException e) {
				// Re-assert the interrupt and stop waiting; the original
				// swallowed it and could spin forever on a persistent interrupt.
				System.out.println("Upload wait interrupted.");
				Thread.currentThread().interrupt();
				return;
			}
		}
	}

}


/**
 * RDFHandler that delegates to an RDFInserter and commits the connection
 * every {@code chunksize} statements, recording progress in LineRecord so
 * an interrupted upload can resume (statement count is used as the resume
 * position; in append mode the first {@code resume} statements are skipped).
 */
class ChunkCommitter implements RDFHandler {

	private RDFInserter inserter;
    private RepositoryConnection conn;
    // number of statements already committed in a previous run (append mode)
    private long resume=0;
    // repository URL; also the key under which progress is stored in LineRecord
    private String uri;
    // statements seen so far, including the skipped ones when resuming
    private long count = 0L;
    // do an intermittent commit every 50,000 triples
    // (NOTE: the old comment said 500,000, which did not match the value)
    private long chunksize = 50000L;
    
    public ChunkCommitter(RepositoryConnection conn,String uri) {
        inserter = new RDFInserter(conn);
        this.conn = conn;
        this.uri = uri;
    }

    @Override
    public void startRDF() throws RDFHandlerException {
        inserter.startRDF();
        // In append mode, look up how many statements were already committed
        // for this repository so handleStatement can skip past them.
        if(DataLoader.append) {
        resume = LineRecord.getInstance().getLine(uri);
        System.out.println(uri+" resumes from line "+resume);
        }
    }

    @Override
    public void endRDF() throws RDFHandlerException {
    	// Commit the final (partial) chunk and persist the progress record
    	// before signalling end-of-data to the inserter.
    	try {
			conn.commit();
			LineRecord.getInstance().update(uri, count);
			System.out.println("Finished: "+count+" triples have been uploaded to "+uri);
		} catch (RepositoryException e) {
			e.printStackTrace();
		}
        inserter.endRDF();
    }

    @Override
    public void handleNamespace(String prefix, String uri)
            throws RDFHandlerException {
        inserter.handleNamespace(prefix, uri);
    }

    @Override
    public void handleStatement(Statement st) throws RDFHandlerException {
    	// Resume support: statements below the recorded position were already
    	// committed in an earlier run, so count them but do not re-insert.
    	if(DataLoader.append && count<resume) {
    		count++;
    		return;
    	}
    	
        inserter.handleStatement(st);
        count++;
        // do an intermittent commit whenever the number of triples
        // has reached a multiple of the chunk size
        if (count % chunksize == 0) {
            try {
                conn.commit();
                // Record progress only after a successful commit, so the
                // resume position never runs ahead of the repository state.
                LineRecord.getInstance().update(uri, count);
                System.out.println(count+" triples have been uploaded to "+uri);
            } catch (RepositoryException e) {
            	throw new RDFHandlerException(e);
            }
        }
    }

    @Override
    public void handleComment(String comment) throws RDFHandlerException {
        inserter.handleComment(comment);
    }
	
}

class LineRecord {
	private Map<String,Long> lineRecs = new HashMap<String,Long>();
	private File record = new File(Configuration.BASE_DIR,"line_recs.txt");
	static private LineRecord instance = null;
	
	static public LineRecord getInstance() {
		if(instance == null)
			instance = new LineRecord();
		return instance;
	}
	
	private LineRecord() {
		if(!record.exists()) {
			try {
				record.createNewFile();
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
		
		try {
			BufferedReader buff = new BufferedReader(new FileReader(record));
			String line = buff.readLine();
			while(line!=null){
				String[] parts = line.split(": ");
				lineRecs.put(parts[0], Long.parseLong(parts[1]));
				line = buff.readLine();
			}
			buff.close();
		} catch (FileNotFoundException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	synchronized public long getLine(String url) {
		Long line = lineRecs.get(url);
		if(line == null)
			line = 0L;
		return line;
	}
	
	synchronized public void update(String url, long line) {
		lineRecs.put(url, line);
		BufferedWriter bw;
		try {
			bw = new BufferedWriter(new FileWriter(record));
			List<String> urls = new ArrayList<String>();
			urls.addAll(lineRecs.keySet());
			Collections.sort(urls);
			for(String u:urls) {
				bw.write(u+": "+lineRecs.get(u));
				bw.newLine();
			}
			bw.flush();
			bw.close();
		} catch (IOException e) {
			e.printStackTrace();
		}
		
	}
	
}

/**
 * Worker that loads one RDF data file (optionally gzip-compressed) into one
 * remote HTTP repository, committing in chunks via ChunkCommitter.
 * Extends Thread for backward compatibility; it is submitted to an
 * ExecutorService as a Runnable by DataLoader.upload.
 */
class UploadThread extends Thread {

	private String uri;
	private File input;
	
	UploadThread(String uri, File input) {
		this.uri = uri;
		this.input = input;
	}
	
	@Override
	public void run() {
		Repository rep = new HTTPRepository(uri);
		RepositoryConnection conn = null;
		InputStream is = null;
		try {
			rep.initialize();
			conn = rep.getConnection();
			// Disable auto-commit: ChunkCommitter controls commit granularity.
			conn.setAutoCommit(false);
			if(!DataLoader.append)
				conn.clear();
			System.out.println("Start loading " + input + " to " + uri);
			RDFParser parser = Rio.createParser(DataLoader.informat);
			parser.setRDFHandler(new ChunkCommitter(conn,uri));
			is = new FileInputStream(input);
			if(input.getName().toLowerCase().endsWith("gz")) {
				is = new GZIPInputStream(is);
			}
			parser.parse(is, "xw4g08");
			System.out.println("Datafile " + input +
					" has been loaded to " + uri + " successfully.");
		} catch (RepositoryException e) {
			// The original swallowed every exception, making failed uploads
			// indistinguishable from successful ones. Report them instead.
			System.out.println("Repository error while loading " + input + " to " + uri);
			e.printStackTrace();
		} catch (RDFParseException e) {
			System.out.println("Parse error in " + input);
			e.printStackTrace();
		} catch (RDFHandlerException e) {
			System.out.println("Handler error while loading " + input + " to " + uri);
			e.printStackTrace();
		} catch (IOException e) {
			// Covers FileNotFoundException as well.
			System.out.println("I/O error reading " + input);
			e.printStackTrace();
		} finally {
			// Release all resources even when parsing fails; the original
			// leaked the stream, connection and repository on any exception.
			if(is != null) {
				try { is.close(); } catch (IOException e) { e.printStackTrace(); }
			}
			if(conn != null) {
				try { conn.close(); } catch (RepositoryException e) { e.printStackTrace(); }
			}
			try { rep.shutDown(); } catch (RepositoryException e) { e.printStackTrace(); }
		}
	}
}

