package zunge.lbe.analysis;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;

import zunge.tools.DbConn;
import zunge.tools.Tools;

/**
 * One experiment's data set: dense user/app id mappings, the set of top apps,
 * and the per-user permission-decision records, split into {@link #V}
 * cross-validation folds via {@link DataSetFold#getFolds}.
 *
 * When constructed with {@code create == true} the data is generated from the
 * database tables {@code tmp_u}, {@code tmp_a} and {@code stat_uap} and
 * persisted under {@link #folder}; otherwise it is loaded back from those files.
 */
public class DataSet {
	public static final int P = 12; // number of permissions
	public static final int TOPA = 1000;//200; // number of top apps kept in topASet
	public static final int V = 10; // number of cross-validation folds
	
	public String folder;
	public HashMap<Long,Integer> uMap;//user_id -> uId (dense, 1-based)
	public HashMap<Long,Integer> aMap;//app_id -> aId (dense, 1-based)
	public HashSet<Integer> topASet;//aIds of the TOPA apps with highest ucount
	
	public List<DataSetFold> folds;
	
	/** Resolves a file label to its full path inside {@link #folder}. */
	public String getFile(String label){return folder +label;}
	public static final String fsuf_uMap = "uMap.txt";
	public static final String fsuf_aMap = "aMap.txt";
	public static final String fsuf_topASet = "topASet.txt";
	public static final String fsuf_recordU = "recordU.txt";
	public static final String fsuf_recordUAP = "recordUAP.txt";
	
	
	/**
	 * @param folder output/input directory for the data-set files
	 * @param conn   database connection; only used when {@code create} is true
	 * @param create true to (re)generate from the database, false to load from disk
	 */
	DataSet(String folder, DbConn conn, boolean create) throws Exception{
		this.folder = folder;
		if(create){
			gen(conn);
		}else{
			load();
		}
		folds = DataSetFold.getFolds(this, V, create);
	}
	
	/** Creates {@link #folder} (including parents) and generates all files from the DB. */
	void gen(DbConn conn) throws Exception{
		// mkdirs() also creates missing parent dirs; fail fast if the folder
		// cannot be created (the old code ignored mkdir()'s return value).
		File dir = new File(folder);
		if(!dir.mkdirs() && !dir.isDirectory()){
			throw new java.io.IOException("cannot create dataset folder: " + folder);
		}
		genFromUAPD(conn);
	}
	
	/**
	 * Builds uMap/aMap/topASet from tmp_u / tmp_a and streams stat_uap into
	 * the recordUAP ("uid aid pid did" per line) and recordU
	 * ("uid aid:pid:did,aid:pid:did,..." per line) files.
	 */
	void genFromUAPD(DbConn conn) throws Exception{		
		//Pre-screening is already done in the data collection part
		//table stat_uap,  tmp_u, tmp_a
		
		//Generate dense 1-based id maps.
		uMap = new HashMap<Long, Integer>();
		ResultSet rs = conn.executeQuery("select u_id from tmp_u");
		try{
			while(rs.next()){
				uMap.put(rs.getLong(1), uMap.size()+1);
			}
		}finally{
			rs.close();
		}
		aMap = new HashMap<Long, Integer>();
		rs = conn.executeQuery("select a_id from tmp_a");
		try{
			while(rs.next()){
				aMap.put(rs.getLong(1), aMap.size()+1);
			}
		}finally{
			rs.close();
		}
		
		/* Whitelist filtering, currently disabled:
		query = "select a.a_id from a join awhite on a.a_name = awhite.a_name";
		rs = conn.executeQuery(query);
		while(rs.next()){
			aMap.remove(rs.getLong(1));
		}
		rs.close();
		*/
		
		// Top apps by user count. rs.next() is checked in the loop condition:
		// the old code ignored it and would have thrown on an exhausted cursor
		// had tmp_a contained fewer than TOPA rows.
		topASet = new HashSet<Integer>();
		rs = conn.executeQuery("select a_id from tmp_a order by ucount desc");
		try{
			for(int i = 0; i < TOPA && rs.next(); ++i){
				topASet.add(aMap.get(rs.getLong(1)));
			}
		}finally{
			rs.close();
		}
		Tools.saveMap(uMap, getFile(fsuf_uMap));
		Tools.saveMap(aMap, getFile(fsuf_aMap));
		Tools.saveSet(topASet, getFile(fsuf_topASet));

		//Generate recordUAP, recordU
		BufferedWriter bwuap = new BufferedWriter(new FileWriter(getFile(fsuf_recordUAP)));
		try{
			BufferedWriter bwu = new BufferedWriter(new FileWriter(getFile(fsuf_recordU)));
			try{
				// Raise group_concat limit so no per-user record is truncated.
				conn.executeUpdate(conn.getPreparedStatement("set session group_concat_max_len = 10000000;"));
				rs = conn.executeQueryInStream("select u_id, group_concat(a_id, ':', p_id, ':', d_final separator ' ') from stat_uap group by u_id");
				try{
					while(rs.next()){
						Integer uid = uMap.get(rs.getLong(1));
						if(uid == null){continue;} // user was filtered out of tmp_u
						String concat = rs.getString(2);
						if(concat == null || concat.isEmpty()){continue;} // guard NULL/empty group_concat
						
						StringBuilder sb = new StringBuilder();
						sb.append(uid).append(' ');
						boolean wroteAny = false;
						for(String apd : concat.split(" ")){
							String[] frags = apd.split(":");
							Integer aid = aMap.get(Long.parseLong(frags[0]));
							if(aid == null){continue;} // app was filtered out of tmp_a
							
							int pid = Integer.parseInt(frags[1]) + 1; // shift p_id to 1-based
							int did;
							// Map d_final to a signed decision; 1 -> +1, 2 -> -1,
							// everything else (incl. 3) is skipped.
							switch(Integer.parseInt(frags[2])){
							case 1: did = 1; break;
							case 2: did = -1; break;
							default: continue;
							}
							bwuap.write(uid+" "+aid+" "+pid+" "+did+"\n");
							sb.append(aid).append(':').append(pid).append(':').append(did).append(',');
							wroteAny = true;
						}
						if(!wroteAny){continue;} // user contributed no usable record
						// Drop the trailing comma before writing the user line.
						bwu.write(sb.substring(0, sb.length()-1)+"\n");
					}
				}finally{
					rs.close();
				}
			}finally{
				bwu.close();
			}
		}finally{
			bwuap.close();
		}
	}
	
	/** Loads uMap/aMap/topASet back from the files written by {@link #genFromUAPD}. */
	void load() throws Exception{
		uMap = Tools.loadMap(getFile(fsuf_uMap));
		aMap = Tools.loadMap(getFile(fsuf_aMap));
		topASet = Tools.loadSet(getFile(fsuf_topASet));
	}
	
}
