package util;		

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;

import primatives.PointND;

/**
 * Utility conversions between point-cloud file formats:
 * LAT -> ASC, ASC -> PCD, and binary STL -> de-duplicated point set.
 */
public class PointCloudConverter
{

	/**
	 * Converts a tab-separated LAT file to an ASC file.
	 *
	 * The first line of the LAT file describes the available columns; when it
	 * starts with 'I' (after stripping tabs), each data row begins with an
	 * intensity value, which is moved to the end of the ASC row.
	 *
	 * @param lat_input  path to the LAT input file
	 * @param asc_output path of the ASC file to create/overwrite
	 */
	public static void Convert_LAT_to_ASC(String lat_input, String asc_output)
	{
		// try-with-resources: the reader/writer are closed even when a row is
		// malformed or an I/O error occurs (the original leaked both on error).
		try (BufferedReader in = new BufferedReader(new FileReader(lat_input));
		     BufferedWriter out = new BufferedWriter(new FileWriter(new File(asc_output))))
		{
			// First line tells us the kind of information that is available.
			String header = in.readLine();
			if (header == null) return; // empty input file: nothing to convert

			header = header.replaceAll("\t", "");

			// write the header for the asc file
			out.write("*** Converted from " + lat_input + " ***\n");

			// Guard charAt(0): a header of only tabs collapses to "".
			boolean bHasIntensity = !header.isEmpty() && header.charAt(0) == 'I';

			String str;
			while ((str = in.readLine()) != null)
			{
				str = str.replaceAll("\t\t", " ");
				String[] values = str.split(" ");

				if (bHasIntensity) out.write(values[1] + " " + values[2] + " " + values[3] + " " + values[0] + "\n"); // place the intensity value at the end
				else out.write(values[0] + " " + values[1] + " " + values[2] + "\n");
			}
			out.write("**end cloud**\n");
		}
		catch (IOException e)
		{
			e.printStackTrace();
		}
	}

	/**
	 * Converts an ASC point file to an unorganized PCL .pcd file (ASCII data,
	 * x/y/z float fields). The ".pcd" extension is appended to pcd_output.
	 *
	 * PCD header order: VERSION, FIELDS, SIZE, TYPE, COUNT, WIDTH, HEIGHT,
	 * VIEWPOINT, POINTS, DATA.
	 *
	 * @param asc_input  path to the ASC input file
	 * @param pcd_output output path WITHOUT the ".pcd" extension
	 */
	public static void Convert_ASC_to_PCD(String asc_input, String pcd_output)
	{
		try (BufferedWriter out = new BufferedWriter(new FileWriter(new File(pcd_output + ".pcd"))))
		{
			// read all the points
			ArrayList<PointND> points = PointCloudReader.ReadFile(asc_input);

			// write the header for PCD
			out.write("# Converted from " + asc_input + "\n");
			out.write("VERSION .7" + "\n");

			// maybe include RGB data... read the points, then compute the DT;
			// from the DT we compute a strike and dip (or whatever statistic that
			// can be colored) per normal per face, then add the color information
			// while writing each point in the data section.
			out.write("FIELDS x y z\n");
			out.write("SIZE 4 4 4\n"); // number of bytes per coordinate value
			out.write("TYPE F F F\n");
			out.write("COUNT 1 1 1\n"); // number of values per dimension (ie possible to have 380 x values)

			// unorganized point cloud: WIDTH = point count, HEIGHT = 1
			out.write("WIDTH " + points.size() + "\n");
			out.write("HEIGHT 1\n");

			out.write("VIEWPOINT 0 0 0 1 0 0 0\n");
			out.write("POINTS " + points.size() + "\n");
			out.write("DATA ascii\n");

			// now write all the points
			for (PointND pt: points)
			{
				out.write(pt.get_coord(0) + " " + pt.get_coord(1) + " " + pt.get_coord(2) + "\n");
			}
		}
		catch (IOException e)
		{
			e.printStackTrace();
		}
	}

	/**
	 * Reads a binary STL mesh and writes its unique vertices as a point set.
	 *
	 * Duplicate detection is done on the full (x, y, z) triple. The previous
	 * implementation kept one HashSet per axis, which wrongly discarded a new
	 * point whenever each of its coordinates had already appeared in some
	 * OTHER point (e.g. after (1,2,3) and (4,5,6), the distinct vertex
	 * (1,5,3) was silently dropped).
	 *
	 * @param input  path to the binary STL file
	 * @param output path of the point-set file to write
	 */
	public static void Convert_STL_to_PointSet(String input, String output)
	{
		ArrayList<STLTriangle> stlmesh = STLBReader.read(input);
		ArrayList<PointND> pointset = new ArrayList<PointND>();

		// Composite key over all three coordinates so only exact duplicate
		// vertices are skipped.
		HashSet<String> seen = new HashSet<String>();

		for (STLTriangle t: stlmesh)
		{
			// The vertex arrays are only read, so no defensive copy is needed.
			float[][] vertices = { t.v1, t.v2, t.v3 };

			for (int r = 0; r < 3; r++)
			{
				float x = vertices[r][0];
				float y = vertices[r][1];
				float z = vertices[r][2];

				// HashSet.add returns false when the triple was already present.
				if (seen.add(x + " " + y + " " + z))
				{
					PointND p = new PointND(3);
					p.set_coord(0, x);
					p.set_coord(1, y);
					p.set_coord(2, z);
					pointset.add(p);
				}
			}
		}

		PointCloudWriter.WriteFile_PointND(output, pointset);
	}

	public static void main(String[] args)
	{
		String stl_input_dir = "D:\\SCHOOL\\DATA\\Pole Density Contouring - Jason\\Image Files\\";
		String stl_output_dir = "D:\\SCHOOL\\DATA\\Pole Density Contouring - Jason\\asc\\";

		File dir = new File(stl_input_dir);

		// listFiles() returns null when the path does not exist or is not a
		// directory; the original code would NPE in that case.
		File[] children = dir.listFiles();
		if (children == null)
		{
			System.err.println("Not a readable directory: " + stl_input_dir);
			return;
		}

		for (File child : children)
		{
			// Strip the extension generically (the original hard-coded a
			// 5-character suffix, which corrupts names with other extensions).
			String name = child.getName();
			int dot = name.lastIndexOf('.');
			String filename = (dot >= 0) ? name.substring(0, dot) : name;

			Convert_STL_to_PointSet(child.getAbsolutePath(), stl_output_dir + filename + ".asc");
		}
	}
}
