﻿using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;

namespace Game
{
    /// <summary>
    /// High-level combat states for the game AI. Member order (and therefore the
    /// underlying values 0..4) is significant and must not change.
    /// </summary>
    enum AIState
    {
        KILL,   // 0
        ATK,    // 1
        DEF,    // 2
        ESCAPE, // 3
        WAIT    // 4
    };
    /// <summary>
    /// A single-layer neural network for the game AI. Twelve battlefield measurements
    /// are fed through a 14x5 weight matrix (the last two input columns are reserved
    /// but unused); the index of the last hidden neuron whose sigmoid activation
    /// exceeds 0.6 is returned as the chosen action (0 when none fires).
    /// Weights are persisted in "NNW.dat", one line per hidden neuron.
    /// </summary>
    class NeuralNetwork
    {
        const int _inputLayerNum = 14;       // weight columns per hidden neuron (incl. 2 unused)
        const int _hiddenLayerNum = 5;       // one neuron per possible answer
        const int _unusedInputLayerNum = 2;  // reserved, not wired to any parameter yet
        const double _learnIngrate = 0.9;    // delta-rule learning rate

        readonly double[,] _weight;          // [input, hidden] weight matrix
        readonly double[] _hiddenLayer;      // most recent hidden-layer activations

        public NeuralNetwork()
        {
            _weight = new double[_inputLayerNum, _hiddenLayerNum];
            _hiddenLayer = new double[_hiddenLayerNum];
        }

        /// <summary>
        /// Loads the weights from "NNW.dat" and runs one forward pass over the twelve
        /// inputs. Returns the index of the last hidden neuron whose sigmoid output
        /// exceeds 0.6, or 0 when no neuron crosses the threshold.
        /// </summary>
        public int RunNeuralNetwork(int enemyBossHp, int enemyAllHp, int enemyPawnNum, int enemyAcherNum, int enemyKnightNum,
                             int selfBossHp, int selHallHp, int selfPawnNum, int selfAcherNum, int selfKnightNum,
                              int attackPowerToBoss, int damageByEnemy)
        {
            LoadWeights();
            int[] nnParams = { enemyBossHp, enemyAllHp, enemyPawnNum, enemyAcherNum, enemyKnightNum,
                               selfBossHp, selHallHp, selfPawnNum, selfAcherNum, selfKnightNum,
                               attackPowerToBoss, damageByEnemy };

            // BUG FIX: the original loop ran over the 12 inputs while indexing the
            // 5-element hidden layer (IndexOutOfRangeException), multiplied only
            // selfPawnNum by the hard-coded weight row 7 instead of pairing every
            // input with its own weight row, and accumulated with "+=" without ever
            // clearing activations left over from a previous call.
            for (int i = 0; i < _hiddenLayerNum; i++)
            {
                double sum = 0.0;
                for (int j = 0; j < nnParams.Length; j++)
                {
                    sum += nnParams[j] * _weight[j, i];
                }
                _hiddenLayer[i] = Sigmoid(sum);
            }

            int answer = 0;
            for (int i = 0; i < _hiddenLayerNum; i++)
            {
                if (_hiddenLayer[i] > 0.6) answer = i;   // last neuron above threshold wins
            }
            return answer;
        }

        /// <summary>
        /// Reads the weight matrix from "NNW.dat": line j holds the 14 space-separated
        /// weights feeding hidden neuron j. Extra or blank lines are ignored.
        /// </summary>
        void LoadWeights()
        {
            // "using" guarantees the stream is closed even when parsing throws.
            using (StreamReader sr = new StreamReader(@"NNW.dat"))
            {
                int j = 0;
                string line;
                while (j < _hiddenLayerNum && (line = sr.ReadLine()) != null)
                {
                    if (string.IsNullOrWhiteSpace(line)) continue;   // tolerate a trailing empty line

                    var parts = line.Split(' ');
                    for (int i = 0; i < _inputLayerNum; ++i)
                    {
                        // InvariantCulture so a file written on one locale loads on another.
                        _weight[i, j] = double.Parse(parts[i], CultureInfo.InvariantCulture);
                    }
                    ++j;
                }
            }
        }

        /// <summary>
        /// Writes the weight matrix to "NNW.dat" in the format LoadWeights expects,
        /// using the round-trip ("R") format so reloading reproduces the exact values.
        /// </summary>
        void SaveWeights()
        {
            using (StreamWriter sw = new StreamWriter(@"NNW.dat"))
            {
                for (int j = 0; j < _hiddenLayerNum; j++)
                {
                    for (int i = 0; i < _inputLayerNum; i++)
                    {
                        sw.Write("{0} ", _weight[i, j].ToString("R", CultureInfo.InvariantCulture));
                    }
                    sw.Write("\n");
                }
            }
        }

        /// <summary>
        /// Online training from "td.txt": each sample is a line of 12 inputs followed
        /// by a line whose first token is the expected answer. Whenever the network
        /// misclassifies a sample, the weights are adjusted and persisted.
        /// </summary>
        void Training()
        {
            using (StreamReader td = new StreamReader(@"td.txt"))
            {
                int[] testData = new int[_inputLayerNum - _unusedInputLayerNum];
                string inputLine;
                while ((inputLine = td.ReadLine()) != null)
                {
                    if (string.IsNullOrWhiteSpace(inputLine)) continue;

                    var parts = inputLine.Split(' ');
                    for (int i = 0; i < testData.Length; ++i)
                    {
                        testData[i] = int.Parse(parts[i], CultureInfo.InvariantCulture);
                    }

                    string answerLine = td.ReadLine();
                    if (answerLine == null) break;   // input without an expected answer: stop instead of crashing
                    int trueAns = int.Parse(answerLine.Split(' ')[0], CultureInfo.InvariantCulture);

                    if (trueAns != RunNeuralNetwork(testData[0], testData[1], testData[2], testData[3], testData[4], testData[5],
                                                    testData[6], testData[7], testData[8], testData[9], testData[10], testData[11]))
                    {
                        WeightChange(trueAns, testData);
                        SaveWeights();
                    }
                }
            }
        }

        /// <summary>
        /// Delta-rule update: nudges every used weight toward the one-hot target
        /// distribution for <paramref name="trueAns"/>, scaled by the learning rate
        /// and the corresponding input value.
        /// </summary>
        void WeightChange(int trueAns, int[] input)
        {
            // One-hot target: 1.0 for the correct answer's neuron, 0.0 elsewhere.
            double[] trueSolution = new double[_hiddenLayerNum];
            if (trueAns >= 0 && trueAns < _hiddenLayerNum)
            {
                trueSolution[trueAns] = 1.0;
            }

            for (int i = 0; i < _hiddenLayerNum; ++i)
            {
                double error = Error(_hiddenLayer[i], trueSolution[i]);
                for (int j = 0; j < _inputLayerNum - _unusedInputLayerNum; ++j)
                {
                    _weight[j, i] += _learnIngrate * error * input[j];
                }
            }
        }

        /// <summary>Logistic sigmoid, mapping any real number into (0, 1).</summary>
        public double Sigmoid(double x)
        {
            return 1.0 / (1.0 + Math.Exp(-x));
        }

        /// <summary>
        /// Output-layer error term of the sigmoid delta rule:
        /// o * (1 - o) * (t - o).
        /// BUG FIX: the original added the terms ("+" instead of "*"), which is not
        /// the delta rule and disagrees with ErrorInput below, which multiplies.
        /// </summary>
        double Error(double output, double trueValue)
        {
            return output * (1.0 - output) * (trueValue - output);
        }

        /// <summary>
        /// Error term back-propagated to a preceding layer: the unit's sigmoid
        /// derivative times the downstream error weighted by the connecting weight.
        /// </summary>
        double ErrorInput(double output, double errorInNext, double weight)
        {
            return output * (1.0 - output) * errorInNext * weight;
        }
    }
}
