/**
 * Mohammed (穆哈穆德) — a stock-price prediction experiment: downloads daily
 * closes from Yahoo Finance and trains a neural network to forecast the next close.
 */
package org.beykery.mohammed;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.nio.charset.StandardCharsets;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.beykery.mohammed.util.FileUtil;
import org.beykery.mohammed.util.HttpUtils;
import org.neuroph.core.NeuralNetwork;
import org.neuroph.core.data.DataSet;
import org.neuroph.core.data.DataSetRow;
import org.neuroph.core.events.LearningEvent;
import org.neuroph.core.events.LearningEventListener;
import org.neuroph.core.learning.SupervisedLearning;
import org.neuroph.nnet.MultiLayerPerceptron;
import org.neuroph.nnet.learning.BackPropagation;

/**
 * Downloads historical closing prices for a stock from the Yahoo Finance CSV
 * service, trains a multilayer perceptron on sliding windows of {@code win}
 * consecutive closes, and prints a prediction for the next closing price.
 *
 * @author beykery
 */
public class Main
{

  // Fully expanded example request:
  // http://ichart.yahoo.com/table.csv?s=600000.SS&a=08&b=25&c=2010&d=09&e=8&f=2010&g=d
  // The a/d month parameters are zero-based; g=d requests daily rows.
  private static final String url = "http://ichart.yahoo.com/table.csv?s=%s&a=%s&b=%s&c=%s&d=%s&e=%s&f=%s&g=d";
  private static final int win = 5;             // sliding-window size: closes per training input
  private static double min = Double.MAX_VALUE; // smallest close seen, for min-max normalization
  private static double max;                    // largest close seen
  private static final double learningRate = 0.001;
  private static final double maxError = 0.0002;
  private static Double[] last;                 // the most recent `win` closes (the forecast input)

  /**
   * Downloads the data, trains the network, and prints the predicted next close.
   *
   * @param args ignored
   */
  public static void main(String... args)
  {
    File f = downloadCsv("000001.SS", "2000-01-01", "2016-03-15");
    f = train(f);
    double p = predict(f, last);
    DecimalFormat df = new DecimalFormat("0.00000");
    System.out.println(df.format(p));
  }

  /**
   * Downloads the CSV price history for a stock unless a cached copy already
   * exists on disk.
   *
   * @param stock Yahoo ticker symbol, e.g. "000001.SS"
   * @param start first day, formatted yyyy-MM-dd
   * @param end   last day, formatted yyyy-MM-dd
   * @return the (possibly cached) CSV file
   * @throws IllegalStateException when the HTTP download returns no content
   */
  private static File downloadCsv(String stock, String start, String end)
  {
    File f = new File("./" + stock + "/" + start + " " + end + ".csv");
    if (!f.exists())
    {
      // The Yahoo endpoint wants the dates split into fields, months zero-based.
      String[] ps = new String[7];
      String[] item = start.split("-");
      ps[0] = stock;
      ps[1] = month(item[1]); // month (0-11)
      ps[2] = item[2];        // day
      ps[3] = item[0];        // year
      item = end.split("-");
      ps[4] = month(item[1]);
      ps[5] = item[2];
      ps[6] = item[0];
      String u = String.format(url, (Object[]) ps);
      System.out.println("下载中...(" + u + ")");
      byte[] content = HttpUtils.get(u, null);
      if (content == null)
      {
        // Fail loudly instead of dying later with an opaque NullPointerException.
        throw new IllegalStateException("download failed: " + u);
      }
      // Fix: decode with an explicit charset instead of the platform default.
      String csv = new String(content, StandardCharsets.UTF_8);
      FileUtil.write(f, csv);
      System.out.println(csv);
    }
    return f;
  }

  /**
   * Converts a one-based month string ("01".."12") into the zero-based form
   * the Yahoo API expects ("0".."11").
   */
  private static String month(String m)
  {
    // Integer.parseInt copes with a leading zero, so no manual stripping is needed.
    return String.valueOf(Integer.parseInt(m) - 1);
  }

  /**
   * Min-max normalizes a closing price into [0.1, 1.0] using the global
   * {@link #min}/{@link #max} recorded while loading the data.
   *
   * @param input raw closing price
   * @return normalized value suitable as network input/target
   */
  private static double normalize(double input)
  {
    return (input - min) / (max - min) * 0.9 + 0.1;
  }

  /**
   * Inverse of {@link #normalize(double)}: maps a network output back to a price.
   *
   * @param input normalized network output
   * @return de-normalized closing price
   */
  private static double deNormalize(double input)
  {
    return min + (input - 0.1) * (max - min) / 0.9;
  }

  /**
   * Trains a {@code win -> 2*win+1 -> 1} multilayer perceptron on the CSV data
   * and saves the resulting network next to the CSV.
   *
   * @param f the CSV data file
   * @return the file the trained network was saved to
   */
  private static File train(File f)
  {
    NeuralNetwork<BackPropagation> neuralNetwork = new MultiLayerPerceptron(win, 2 * win + 1, 1);
    SupervisedLearning learningRule = neuralNetwork.getLearningRule();
    learningRule.setMaxError(maxError);
    learningRule.setLearningRate(learningRate);
    // Log the total network error on every learning iteration.
    learningRule.addListener(new LearningEventListener()
    {
      @Override
      public void handleLearningEvent(LearningEvent learningEvent)
      {
        SupervisedLearning rule = (SupervisedLearning) learningEvent.getSource();
        System.out.println("网络迭代error "
                + rule.getCurrentIteration() + ": "
                + rule.getTotalNetworkError());
      }
    });
    DataSet trainingSet = loadData(f);
    neuralNetwork.learn(trainingSet);
    f = new File(f.getParentFile(), f.getName() + ".train");
    neuralNetwork.save(f.getAbsolutePath());
    return f;
  }

  /**
   * Parses closing prices out of the CSV, builds sliding-window training rows,
   * and records the global min/max plus the most recent window in {@link #last}.
   *
   * Side effects: updates {@code min}, {@code max} and {@code last}.
   * Assumes the CSV has a header row, is newest-first, and carries the close
   * in its last column — TODO confirm against the Yahoo CSV layout.
   *
   * @param f the CSV file
   * @return the training set (possibly empty/partial if parsing failed mid-file)
   */
  private static DataSet loadData(File f)
  {
    DataSet dataSet = null;
    // Fix: try-with-resources — the reader was previously never closed.
    try (BufferedReader reader = new BufferedReader(new FileReader(f)))
    {
      dataSet = new DataSet(win, 1);
      List<Double> raw = new ArrayList<>(); // closing prices, newest first
      String line;
      reader.readLine(); // skip the CSV header row
      while ((line = reader.readLine()) != null)
      {
        String[] token = line.split(",");
        double d = Double.parseDouble(token[token.length - 1]);
        raw.add(d);
        if (d > max)
        {
          max = d;
        }
        if (d < min)
        {
          min = d;
        }
      }
      Collections.reverse(raw); // oldest first, so windows run forward in time
      Double[] src = new Double[raw.size()];
      raw.toArray(src);
      // Each row: `win` consecutive closes as input, the following close as target.
      for (int i = 0; i <= src.length - win - 1; i++)
      {
        double tv[] = new double[win];
        for (int j = i; j < i + win; j++)
        {
          tv[j - i] = normalize(src[j]);
        }
        double ev[] = new double[]
        {
          normalize(src[i + win])
        };
        dataSet.addRow(new DataSetRow(tv, ev));
      }
      // Remember the latest window — it is the input for the actual forecast.
      last = new Double[win];
      System.arraycopy(src, src.length - win, last, 0, win);
    } catch (Exception e)
    {
      e.printStackTrace();
    }
    return dataSet;
  }

  /**
   * Loads the saved network and predicts the close that follows {@code ins}.
   *
   * @param f   the saved network file
   * @param ins the most recent {@code win} closes (un-normalized)
   * @return the de-normalized predicted closing price
   */
  private static double predict(File f, Double... ins)
  {
    double[] temp = new double[ins.length];
    NeuralNetwork neuralNetwork = NeuralNetwork.createFromFile(f);
    for (int i = 0; i < ins.length; i++)
    {
      temp[i] = normalize(ins[i]);
    }
    neuralNetwork.setInput(temp);
    neuralNetwork.calculate();
    double[] networkOutput = neuralNetwork.getOutput();
    return deNormalize(networkOutput[0]);
  }
}
