/*
 * Encog(tm) Java Examples v3.3
 * http://www.heatonresearch.com/encog/
 * https://github.com/encog/encog-java-examples
 *
 * Copyright 2008-2014 Heaton Research, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *   
 * For more information on Heaton Research copyrights, licenses 
 * and trademarks visit:
 * http://www.heatonresearch.com/copyright
 */
package org.encog.examples.guide.classification;

import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.encog.ConsoleStatusReportable;
import org.encog.Encog;
import org.encog.bot.BotUtil;
import org.encog.examples.guide.regression.AutoRegression;
import org.encog.ml.MLClassification;
import org.encog.ml.MLRegression;
import org.encog.ml.data.MLData;
import org.encog.ml.data.versatile.NormalizationHelper;
import org.encog.ml.data.versatile.VersatileMLDataSet;
import org.encog.ml.data.versatile.columns.ColumnDefinition;
import org.encog.ml.data.versatile.columns.ColumnType;
import org.encog.ml.data.versatile.sources.CSVDataSource;
import org.encog.ml.data.versatile.sources.VersatileDataSource;
import org.encog.ml.factory.MLMethodFactory;
import org.encog.ml.model.EncogModel;
import org.encog.util.csv.CSVFormat;
import org.encog.util.csv.ReadCSV;
import org.encog.util.simple.EncogUtility;

/**
 * Encog "versatile data set" example, repurposed from the UCI iris classification
 * demo into a regression experiment: trains a feedforward network on a local
 * 15-column numeric CSV (14 continuous inputs + 1 target), then evaluates the
 * coefficient of determination (R^2) on a second local CSV.
 *
 * NOTE(review): the original iris download path is retained in
 * {@link #downloadData(String[])} but is no longer called from {@link #run(String[])};
 * the hard-coded {@code D:} file paths below must exist for this example to run.
 */
public class IrisClassification {

	/** Source URL of the original UCI iris data set (used only by {@link #downloadData(String[])}). */
	public static String DATA_URL = "https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data";

	/** Number of continuous input columns in the local CSV files (target is column 14). */
	private static final int INPUT_COLUMNS = 14;

	private String tempPath;

	/**
	 * Downloads the UCI iris data set.
	 *
	 * @param args optional command-line arguments; {@code args[0]}, if present,
	 *             overrides the download directory (defaults to the system temp dir)
	 * @return the downloaded CSV file
	 * @throws MalformedURLException if {@link #DATA_URL} is not a valid URL
	 */
	public File downloadData(String[] args) throws MalformedURLException {
		if (args.length != 0) {
			tempPath = args[0];
		} else {
			tempPath = System.getProperty("java.io.tmpdir");
		}

		File irisFile = new File(tempPath, "iris.csv");
		// Announce before the (potentially slow) network download, not after it.
		System.out.println("Downloading Iris dataset to: " + irisFile);
		BotUtil.downloadPage(new URL(IrisClassification.DATA_URL), irisFile);
		return irisFile;
	}

	/**
	 * Computes the coefficient of determination, R^2 = 1 - SS_res / SS_tot.
	 * Returns {@code NaN} when the lists are empty or the actual values have
	 * zero variance (matching the behavior of the original inline computation).
	 *
	 * @param actual    observed target values
	 * @param predicted model outputs, index-aligned with {@code actual}
	 * @return the R^2 statistic
	 */
	private static double computeRSquared(List<Double> actual, List<Double> predicted) {
		double mean = 0;
		for (int i = 0; i < actual.size(); i++) {
			mean += actual.get(i);
		}
		mean = mean / actual.size();

		double ssResidual = 0; // sum of squared prediction errors
		double ssTotal = 0;    // total variance of the actual values
		for (int i = 0; i < actual.size(); i++) {
			double residual = actual.get(i) - predicted.get(i);
			ssResidual += residual * residual;
			double deviation = actual.get(i) - mean;
			ssTotal += deviation * deviation;
		}
		return 1 - ssResidual / ssTotal;
	}

	/**
	 * Trains and evaluates the model. Any exception is caught and printed so the
	 * example terminates cleanly.
	 *
	 * @param args unused by the current (local-file) code path
	 */
	public void run(String[] args) {
		try {
			// Original example downloaded the iris data; this variant reads a
			// local 15-column numeric CSV instead.
			// File irisFile = downloadData(args);
			// NOTE(review): hard-coded path — confirm D:\brs.csv exists.
			File irisFile = new File("D:", "brs.csv");

			// Define the format of the data file.
			// This area will change, depending on the columns and
			// format of the file that you are trying to model.
			VersatileDataSource source = new CSVDataSource(irisFile, false,
					CSVFormat.DECIMAL_POINT);
			VersatileMLDataSet data = new VersatileMLDataSet(source);

			// Columns 0..13 are continuous inputs, named "1".."14".
			for (int i = 0; i < INPUT_COLUMNS; i++) {
				data.defineSourceColumn(String.valueOf(i + 1), i, ColumnType.continuous);
			}

			// Define the column that we are trying to predict.
			ColumnDefinition outputColumn = data.defineSourceColumn("species", INPUT_COLUMNS,
					ColumnType.nominal);

			// Analyze the data, determine the min/max/mean/sd of every column.
			data.analyze();

			// Map the prediction column to the output of the model, and all
			// other columns to the input.
			data.defineSingleOutputOthersInput(outputColumn);

			// Create a feedforward neural network as the model type.
			// You could also use other model types, such as:
			// MLMethodFactory.SVM:  Support Vector Machine (SVM)
			// MLMethodFactory.TYPE_RBFNETWORK: RBF Neural Network
			// MLMethodFactory.TYPE_NEAT: NEAT Neural Network
			// MLMethodFactory.TYPE_PNN: Probabilistic Neural Network
			EncogModel model = new EncogModel(data);
			model.selectMethod(data, MLMethodFactory.TYPE_FEEDFORWARD);

			// Send any output to the console.
			model.setReport(new ConsoleStatusReportable());

			// Now normalize the data. Encog will automatically determine the correct
			// normalization type based on the model chosen in the last step.
			data.normalize();

			// Hold back some data for a final validation.
			// Shuffle the data into a random ordering.
			// Use a seed of 1001 so that we always use the same holdback and will
			// get more consistent results.
			model.holdBackValidation(0.3, true, 1001);

			// Choose whatever is the default training type for this model.
			model.selectTrainingType(data);

			// Use a 5-fold cross-validated train. Return the best method found.
			MLRegression bestMethod = (MLRegression) model.crossvalidate(5, true);

			// Display the training and validation errors.
			System.out.println("Training error: "
					+ EncogUtility.calculateRegressionError(bestMethod, model.getTrainingDataset()));
			System.out.println("Validation error: "
					+ EncogUtility.calculateRegressionError(bestMethod, model.getValidationDataset()));

			// Display our normalization parameters.
			NormalizationHelper helper = data.getNormHelper();
			System.out.println(helper.toString());

			// Display the final model.
			System.out.println("Final model: " + bestMethod);

			// Feed a separate test file through the trained model. This also shows
			// how you would process new data that was not part of your training
			// set: no retraining is needed, simply reuse the NormalizationHelper
			// to normalize inputs and denormalize outputs.
			// NOTE(review): hard-coded path — confirm D:\test.csv exists.
			File testFile = new File("D:", "test.csv");
			ReadCSV csv = new ReadCSV(testFile, false, CSVFormat.DECIMAL_POINT);
			String[] line = new String[INPUT_COLUMNS];
			MLData input = helper.allocateInputVector();
			List<Double> realValue = new ArrayList<Double>();
			List<Double> predValue = new ArrayList<Double>();

			while (csv.next()) {
				StringBuilder result = new StringBuilder();
				for (int i = 0; i < INPUT_COLUMNS; i++) {
					line[i] = csv.get(i);
				}

				String correct = csv.get(INPUT_COLUMNS);
				helper.normalizeInputVector(line, input.getData(), false);
				MLData output = bestMethod.compute(input);
				String predicted = helper.denormalizeOutputVectorToString(output)[0];
				// Double.valueOf over the deprecated `new Double(...)` constructor.
				realValue.add(Double.valueOf(correct));
				predValue.add(Double.valueOf(predicted));

				result.append(Arrays.toString(line));
				result.append(" -> predicted: ");
				result.append(predicted);
				result.append("(correct: ");
				result.append(correct);
				result.append(")");

				System.out.println(result.toString());
			}

			double rSquared = computeRSquared(realValue, predValue);
			// The original labeled this value "Error", but it is actually R^2.
			System.out.println("R^2: " + rSquared);
			System.out.println("Samples: " + realValue.size());

			// Shut down Encog's thread pool.
			Encog.getInstance().shutdown();

		} catch (Exception ex) {
			ex.printStackTrace();
		}
	}

	public static void main(String[] args) {
		IrisClassification prg = new IrisClassification();
		prg.run(args);

		System.out.println("**********");
	}
}
