/// <summary>
/// Neural-network primitives: single- and two-layer perceptron variants, pluggable
/// activation functions, per-variable sample normalization and a simple training driver.
/// </summary>
namespace Kag.NeuralNetworks {
	/// <summary>
	/// 
	/// </summary>
	public enum NormalizationMethod : byte {
		None,
		Regular,
		Statistic,
		HyperbolicTangent,
	}
	/// <summary>
	/// 
	/// </summary>
	public interface IActivation {
		float Function(float input);
		float Derivative(float input, float output);
	}
	/// <summary>
	/// 
	/// </summary>
	public abstract class NeuralNetwork {
		protected float epsilon=0.00001f;
		float rate;
		public float Rate {
			get { return rate; }
			set { rate=value; }
		}
		float[] sample;
		public float[] Sample {
			get { return sample; }
			set { sample=value; }
		}
		float[] output;
		public float[] Output {
			get { return output; }
		}
		float[] error;
		public float[] Error {
			get { return error; }
		}
		ushort nInput;
		public int NInput {
			get { return nInput; }
		}
		ushort nOutput;
		public int NOutput {
			get { return nOutput; }
		}
		short learningNeuron=-1;
		public int LearningNeuron {
			get { return learningNeuron; }
			set { learningNeuron=(short)value; }
		}
		public NeuralNetwork(int nInput, int nOutput) {
			Debug.Assert(nInput>0&&nInput<256 && nOutput>0&&nOutput<256);
			this.nInput=(ushort)nInput; this.nOutput=(ushort)nOutput;
			output=new float[nOutput];
			error=new float[nInput];
		}
		public abstract void Forward();
		public abstract void Backward();
		public abstract int WeightCount { get; }
		public abstract void ForEachWeight(Callbacks callback);
		public virtual float LoadWeights(byte[] weights) {
			return 0;
		}
		public virtual void SaveWeights(byte[] weights) {
		}
		protected static float GetWeight(byte[] weights, ref int i) {
			int iw=from_gray[weights[i]]*256+from_gray[weights[i+1]];
			iw-=32768;
			float ret=(float)iw/256/64;
			i+=2;
			return ret;
		}
		protected static float GetPlusWeight(byte w1, byte w2, int scale) {
			int w=from_gray[w1]*256+from_gray[w2];
			return (float)w/scale;
		}
		protected static float GetWeight(byte w1, byte w2, int scale) {
			int w=from_gray[w1]*256+from_gray[w2];
			return (float)(w-32768)/scale;
		}
		protected static void SetWeight(float w, byte[] weights, ref int i) {
			int iw=(int)(w*64*256+0.5f);
			if( iw>32767 ) iw=32767; else if( iw<-32768 ) iw=-32768;
			iw+=32768;
			weights[i]=(byte)to_gray[iw/256];
			weights[i+1]=(byte)to_gray[iw%256];
			i+=2;
		}
		static NeuralNetwork() {
			create_gray();
		}
		static int[] from_gray=new int[256];
		static int[] to_gray=new int[256];
		static void create_gray() {
			int ig=0;
			for( int i=0; i<256; ++i )
				from_gray[i]=-1;
			from_gray[ig]=0;
			to_gray[0]=0;
			for( int i=1; i<256; ++i ) {
				int mask=1;
				while( true ) {
					int igs=ig^mask;
					if( from_gray[igs]==-1 ) {
						from_gray[igs]=i;
						to_gray[i]=igs;
						ig=igs;
						break;
					}
					mask<<=1;
					if( mask>=256 ) throw new System.Exception();
				}
			}
		}
		public virtual void Backup() {
		}
		public virtual void Restore() {
		}
	}
	/// <summary>
	/// 
	/// </summary>
	public sealed class Perceptron23 : NeuralNetwork {
		const int NG=3;
		readonly float[] weight, backup;
		readonly IActivation activation;
		readonly bool[] blocks;
		double error;
		int ND;
		public Perceptron23(int nInput, int nOutput, IActivation activation) : base(nInput, nOutput) {
			this.activation=activation;
			int NL=nInput*NG;
			weight=new float[(NL+1)*nOutput];
			backup=new float[weight.Length];
			blocks=new bool[NInput];
		}
		public override void Forward() {
			int ii=0;
			for( int io=0; io<NOutput; io++ ) {
				float osum=0;
				for( int i=0; i<NInput; i++ ) {
					if( !blocks[i] ) 
						osum+=(weight[ii+(Sample[i]<0?1:2)]+weight[ii])*Sample[i];
					ii+=NG;
				}
				osum+=weight[ii++];
				Output[io]=activation.Function(osum);
			}
		}
		public override void Backward() {
			++ND;
			int ii=0;
			for( int io=0; io<NOutput; io++ ) {
				float dif=Sample[NInput+io]-Output[io];
				error+=dif*dif;
				float sigma=Rate*dif;
				for( int i=0; i<NInput; i++ ) {
					if( !blocks[i] ) {
						float s=Sample[i];
						float delta=sigma*s;
						weight[ii]+=delta;
						weight[ii+(s<0?1:2)]+=delta;
					}
					ii+=NG;
				}
				weight[ii++]+=sigma;
			}
		}
		public override int WeightCount {
			get { return weight.Length; }
		}
		public override void ForEachWeight(Callbacks callback) {
			for( int i=0; i<weight.Length; i++ )
				callback.Callback(ref weight[i]);
		}
		public override void Backup() {
			weight.CopyTo(backup, 0);
		}
		public override void Restore() {
			backup.CopyTo(weight, 0);
		}
		public void EndEpoch() {
			double alpha=0;
			for( int i=0; i<weight.Length; ++i )
				alpha+=weight[i]*weight[i];
			alpha=alpha==0?0:(weight.Length*error/alpha/ND/NOutput);
			ND=0; error=0;
			alpha*=Rate;
			for( int i=0; i<weight.Length; ++i )
				weight[i]-=(float)(weight[i]*alpha);
		}
		public void BlockInput(int i) {
			blocks[i]=true;
		}
		public void UnblockInput(int i) {
			blocks[i]=false;
		}
	}
	/// <summary>
	/// 
	/// </summary>
	public sealed class Perceptron : NeuralNetwork {
		readonly float[] weight;
		readonly float[] backup;
		readonly bool[] blocks;
		readonly IActivation activation;
		public Perceptron(int nInput, int nOutput, IActivation activation) : base(nInput, nOutput) {
			this.activation=activation;
			weight=new float[(NInput+1)*nOutput];
			backup=new float[weight.Length];
			blocks=new bool[NInput];
			for( int i=0; i<NInput; ++i )
				blocks[i]=false;
		}
		public override void Forward() {
			int ii=0;
			for( int io=0; io<NOutput; io++ ) {
				float osum=0;
				for( int i=0; i<NInput; i++ ) {
					if( !blocks[i] )
						osum+=weight[ii]*Sample[i];
					++ii;
				}
				osum+=weight[ii++];
				Output[io]=activation.Function(osum);
			}
		}
		public override void Backward() {
			int ii=0;
			for( int io=0; io<NOutput; io++ ) {
				float dif=Sample[NInput+io]-Output[io];
				float sigma=Rate*dif;
				for( int i=0; i<NInput; i++ ) {
					if( !blocks[i] )
						weight[ii]+=sigma*Sample[i];
					++ii;
				}
				weight[ii++]+=sigma;
			}
		}
		public override int WeightCount {
			get { return weight.Length; }
		}
		public override void ForEachWeight(Callbacks callback) {
			for( int i=0; i<weight.Length; i++ )
				callback.Callback(ref weight[i]);
		}
		public override void Backup() {
			weight.CopyTo(backup, 0);
		}
		public override void Restore() {
			backup.CopyTo(weight, 0);
		}
		public void BlockInput(int i) {
			blocks[i]=true;
		}
		public void UnblockInput(int i) {
			blocks[i]=false;
		}
	}
	/// <summary>
	/// 
	/// </summary>
	public sealed class PerceptronCat : NeuralNetwork {
		readonly float[] weight;
		readonly float[] backup;
		readonly bool[] blocks;
		readonly IActivation activation;
		readonly int nCat;
		public PerceptronCat(int nInput, int nCat, int nOutput, IActivation activation) : base(nInput, nOutput) {
			this.activation=activation;
			this.nCat=nCat;
			weight=new float[NInput*nOutput*(nCat+1)];
			backup=new float[weight.Length];
			blocks=new bool[NInput];
			for( int i=0; i<NInput; ++i )
				blocks[i]=false;
		}
		public override void Forward() {
			int ii=0;
			int cat=(int)Sample[NInput-1];
			for( int io=0; io<NOutput; io++ ) {
				float osum=0;
				for( int i=0; i<NInput-1; i++ ) {
					if( !blocks[i] )
						osum+=weight[ii]*Sample[i];
					++ii;
				}
				ii+=cat*(NInput-1);
				for( int i=0; i<NInput-1; i++ ) {
					if( !blocks[i] )
						osum+=weight[ii]*Sample[i];
					++ii;
				}
				ii+=(nCat-cat-1)*(NInput-1);
				osum+=weight[ii++];
				Output[io]=activation.Function(osum);
			}
		}
		public override void Backward() {
			int ii=0;
			int cat=(int)Sample[NInput-1];
			for( int io=0; io<NOutput; io++ ) {
				float dif=Sample[NInput+io]-Output[io];
				float sigma=Rate*dif;
				for( int i=0; i<NInput-1; i++ ) {
					if( !blocks[i] )
						weight[ii]+=sigma*Sample[i];
					++ii;
				}
				ii+=cat*(NInput-1);
				for( int i=0; i<NInput-1; i++ ) {
					if( !blocks[i] )
						weight[ii]+=sigma*Sample[i];
					++ii;
				}
				ii+=(nCat-cat-1)*(NInput-1);
				weight[ii++]+=sigma;
			}
		}
		public override int WeightCount {
			get { return weight.Length; }
		}
		public override void ForEachWeight(Callbacks callback) {
			for( int i=0; i<weight.Length; i++ )
				callback.Callback(ref weight[i]);
		}
		public override void Backup() {
			weight.CopyTo(backup, 0);
		}
		public override void Restore() {
			backup.CopyTo(weight, 0);
		}
		public void BlockInput(int i) {
			blocks[i]=true;
		}
		public void UnblockInput(int i) {
			blocks[i]=false;
		}
	}
	/// <summary>
	/// 
	/// </summary>
	public sealed class PerceptronTeam : NeuralNetwork {
		Perceptron[] pers;
		public PerceptronTeam(int nInput, int nOutput, IActivation activation, int nTeam) : base(nInput, nOutput) {
			pers=new Perceptron[nTeam];
			for( int i=0; i<nTeam; ++i )
				pers[i]=new Perceptron(nInput, nOutput, activation);
		}
		public override void Forward() {
			System.Array.Clear(Output, 0, Output.Length);
			for( int i=0; i<pers.Length; ++i ) {
				Perceptron p=pers[i];
				p.Sample=Sample;
				p.Forward();
				for( int j=0; j<NOutput; ++j )
					Output[j]+=p.Output[j];
			}
			for( int j=0; j<NOutput; ++j )
				Output[j]/=pers.Length;
		}
		public override void Backward() {
			for( int i=0; i<pers.Length; ++i ) {
				Perceptron p=pers[i];
				p.Rate=Rate*(float)Math.Random.NextDouble()*2;
				p.Backward();
			}
		}
		public override int WeightCount {
			get { return pers.Length==0?0:pers.Length*pers[0].WeightCount; }
		}
		public override void ForEachWeight(Callbacks callback) {
			for( int i=0; i<pers.Length; ++i ) 
				pers[i].ForEachWeight(callback);
		}
		public override void Backup() {
			for( int i=0; i<pers.Length; ++i ) 
				pers[i].Backup();
		}
		public override void Restore() {
			for( int i=0; i<pers.Length; ++i ) 
				pers[i].Restore();
		}
	}
	/// <summary>
	/// 
	/// </summary>
	public sealed class Perceptron3 : NeuralNetwork {
		float[] hweight;
		float[] oweight;
		float[] bhweight;
		float[] boweight;
		float[] hcombination;
		float[] houtput;
		float[] ocombination;
		float[] sigma;
		IActivation activation;
		ushort nHidden;
        public float[] State {
            get { return houtput; }
        }
		public Perceptron3(int nInput, int nHidden, int nOutput, IActivation activation) : base(nInput, nOutput) {
			this.activation=activation;
			this.nHidden=(ushort)nHidden;
			hweight=new float[(nInput+1)*nHidden];
			bhweight=new float[hweight.Length];
			oweight=new float[(nHidden+1)*nOutput];
			boweight=new float[oweight.Length];
			hcombination=new float[nHidden];
			houtput=new float[nHidden];
			sigma=new float[nHidden];
			ocombination=new float[nOutput];
			LearningNeuron=-1;
		}
		public override void Forward() {
			int iw=0;
			for( int ih=0; ih<nHidden; ih++ ) {
				float hsum=0;
				for( int ii=0; ii<NInput; ii++ )
					hsum+=hweight[iw++]*Sample[ii];
                hsum+=hweight[iw++];
                hcombination[ih]=hsum;
				houtput[ih]=activation.Function(hsum);
			}
			iw=0;
			for( int io=0; io<NOutput; io++ ) {
				float osum=0;
				for( int ih=0; ih<nHidden; ih++ )
					osum+=oweight[iw++]*houtput[ih];
                osum+=oweight[iw++];
				ocombination[io]=osum;
				Output[io]=activation.Function(osum);
			}
		}
		public override void Backward() {
			System.Array.Clear(sigma, 0, sigma.Length);
			int iw=0;
			for( int io=0; io<NOutput; io++ ) {
				float sig2=(Sample[NInput+io]-Output[io])*Rate;
                float sig=sig2*Output[io]*(1-Output[io]);
				for( int ih=0; ih<nHidden; ih++ ) {
					sigma[ih]+=oweight[iw]*sig;
					oweight[iw++]+=sig*houtput[ih];
				}
				oweight[iw++]+=sig;
			}
			iw=0;
			for( int ih=0; ih<nHidden; ih++ ) {
				sigma[ih]*=houtput[ih]*(1-houtput[ih]);
				for( int ii=0; ii<NInput; ii++ )
					hweight[iw++]+=sigma[ih]*Sample[ii];
				hweight[iw++]+=sigma[ih];
			}
		}
		public override int WeightCount {
			get { return hweight.Length+oweight.Length; }
		}
		public override void ForEachWeight(Callbacks callback) {
			for( int i=0; i<hweight.Length; i++ )
				callback.Callback(ref hweight[i]);
			for( int i=0; i<oweight.Length; i++ )
				callback.Callback(ref oweight[i]);
		}
		public override void Backup() {
			hweight.CopyTo(bhweight, 0);
			oweight.CopyTo(boweight, 0);
		}
		public override void Restore() {
			bhweight.CopyTo(hweight, 0);
			boweight.CopyTo(oweight, 0);
		}
        public float[] Hidden {
            get { return houtput; }
        }
	}
	/// <summary>
	/// 
	/// </summary>
	public sealed class SigmoidActivation : IActivation {
		public float Function(float input) {
			return Math.Sigmoid(input);
		}
		public float Derivative(float input, float output) {
			return Math.SigmoidDerivative(output);
		}
	}
	/// <summary>
	/// 
	/// </summary>
	public sealed class ElliottSigmoidActivation : IActivation {
		public float Function(float input) {
			return input/(2*(1+System.Math.Abs(input)))+0.5f;
		}
		public float Derivative(float input, float output) {
			input=1+System.Math.Abs(input);
			return 1/(2*input*input);
		}
	}
	/// <summary>
	/// 
	/// </summary>
	public sealed class FactoredSigmoidActivation : IActivation {
		float factor;
		public FactoredSigmoidActivation(float factor) {
			this.factor=factor;
		}
		public float Function(float input) {
			return Math.Sigmoid(input, factor);
		}
		public float Derivative(float input, float output) {
			return Math.SigmoidDerivative(output);
		}
	}
	/// <summary>
	/// 
	/// </summary>
	public sealed class RoundlySigmoidActivation : IActivation {
		public float Function(float input) {
			if( input>=4.1f ) return 1;
			else if( input<=-4.1f ) return 0;
			else {
				input/=4.1f;
				return 0.5f+input*(1-System.Math.Abs(input)/2);
			}
		}
		public float Derivative(float input, float output) {
			return Math.SigmoidDerivative(output);
		}
	}
	/// <summary>
	/// 
	/// </summary>
	public sealed class TanhActivation : IActivation {
		public float Function(float input) {
			return Math.Tanh(input);
		}
		public float Derivative(float input, float output) {
			return 1-output*output;
		}
	}
	/// <summary>
	/// 
	/// </summary>
	public sealed class Tools {
		Tools() {
		}
		class RandomizeWeightsCallBack : Callbacks {
			float max;
			public RandomizeWeightsCallBack(float max) {
				this.max=max;
			}
			public override void Callback(ref float weight) {
				weight=Math.SignedRand(max);
			}
		}
		public static void RandomizeWeights(NeuralNetwork network, float max) {
			network.ForEachWeight(new RandomizeWeightsCallBack(max));
		}
		class PerturbWeightsCallBack : Callbacks {
			float max;
			bool relative;
			public PerturbWeightsCallBack(float max, bool relative) {
				this.max=max; this.relative=relative;
			}
			public override void Callback(ref float weight) {
				if( relative )
					weight+=Math.SignedRand(System.Math.Abs(weight)*max);
				else
					weight+=Math.SignedRand(max);
			}
		}
		public static void PerturbWeights(NeuralNetwork network, float max, bool relative) {
			network.ForEachWeight(new PerturbWeightsCallBack(max, relative));
		}
		class DecayWeightsCallBack : Callbacks {
			float how;
			bool relative;
			public DecayWeightsCallBack(float how, bool relative) {
				this.how=how; this.relative=relative;
			}
			public override void Callback(ref float weight) {
				if( relative ) {
					weight-=how*(float)System.Math.Tanh(weight);
					/*float decay=how*(float)System.Math.Sqrt(System.Math.Abs(weight));
					if( weight<0 )
						weight+=decay;
					else
						weight-=decay;*/
				}
				else
					weight-=how;
			}
		}
		public static void DecayWeights(NeuralNetwork network, float how, bool relative) {
			network.ForEachWeight(new DecayWeightsCallBack(how, relative));
		}
	}
	/// <summary>
	/// 
	/// </summary>
	public class Normalization {
		NormalizationMethod[] method;
		double[] median;
		double[] deviation;
		double[,] correlation;
		public Normalization(int nVariables) {
			median=new double[nVariables];
			deviation=new double[nVariables];
			method=new NormalizationMethod[nVariables];
			for( int i=0; i<method.Length; i++ )
				method[i]=NormalizationMethod.Statistic;
		}
		public NormalizationMethod[] Method {
			get { return method; }
		}
		public float Normalize(float variable, int index) {
			if( method[index]!=NormalizationMethod.None ) {
				variable=(float)((variable-median[index])/deviation[index]);
				if( method[index]==NormalizationMethod.HyperbolicTangent )
					variable=Math.Tanh(variable);
			}
			return variable;
		}
		public float Denormalize(float variable, int index) {
			if( method[index]!=NormalizationMethod.None ) {
				if( method[index]==NormalizationMethod.HyperbolicTangent )
					variable=Math.Atanh(variable);
				variable=(float)(variable*deviation[index]+median[index]);
			}
			return variable;
		}
		public void Normalize(float[] sample) {
			for( int i=0; i<sample.Length; i++ )
				sample[i]=Normalize(sample[i], i);
		}
		public void Denormalize(float[] sample) {
			for( int i=0; i<sample.Length; i++ )
				sample[i]=Denormalize(sample[i], i);
		}
		public void Analyze(float[][] samples) {
			System.Array.Clear(deviation, 0, deviation.Length);
			System.Array.Clear(median, 0, median.Length);
			int nRow=samples.Length;
			int nColumn=samples[0].Length;
			float[] min=new float[nColumn];
			float[] max=new float[nColumn];
			for( int i=0; i<nColumn; ++i ) {
				min[i]=float.MaxValue; 
				max[i]=float.MinValue;
			}
			for( int i=0; i<nRow; ++i ) {
				for( int j=0; j<nColumn; ++j ) {
					float v=samples[i][j];
					if( v>max[j] ) max[j]=v;
					if( v<min[j] ) min[j]=v;
					median[j]+=v;
					deviation[j]+=(double)v*v;
				}
			}
			for( int j=0; j<nColumn; ++j ) {
				if( method[j]==NormalizationMethod.Regular ) {
					median[j]=(min[j]+max[j])/2;
					deviation[j]=(max[j]-min[j])/2;
				}
				else { 
					deviation[j]=(float)System.Math.Sqrt((deviation[j]-median[j]/nRow*median[j])/(nRow-1));
					median[j]/=nRow;
				}
			}
		}
		public void Normalize(float[][] samples) {
			int nRow=samples.Length;
			for( int i=0; i<nRow; i++ )
				Normalize(samples[i]);
		}
		public void Denormalize(float[][] samples) {
			int nRow=samples.Length;
			for( int i=0; i<nRow; i++ )
				Denormalize(samples[i]);
		}
		public void Correlation(float[][] samples, int nInputs) {
			int nx=nInputs, ny=median.Length-nInputs;
			int N=samples.Length;
			correlation=new double[nx, ny];
			for( int iy=0; iy<ny; ++iy ) {
				for( int ix=0; ix<nx; ++ix ) {
					for( int i=0; i<N; ++i ) {
						correlation[ix,iy]+=(samples[i][ix]-median[ix])*(samples[i][nx+iy]-median[nx+iy]);
					}
					correlation[ix,iy]=correlation[ix,iy]/N/deviation[ix]/deviation[nx+iy];
				}
			}
			bool[] used=new bool[nx];
			float k=1f;
			float dk=0.95f;
			for( int i2x=0; i2x<nx; ++i2x ) {
				int imax=-1;
				float max=0;
				for( int ix=0; ix<nx; ++ix ) {
					if( used[ix] ) continue;
					if( imax<0 ) imax=ix;
					float cur=0;
					for( int iy=0; iy<ny; ++iy )
						cur+=(float)System.Math.Abs(correlation[ix,iy]);
					if( cur>max ) {
						max=cur;
						imax=ix;
					}
				}
				deviation[imax]/=k;
				k*=dk;
				used[imax]=true;
			}
		}
	}
	/// <summary>
	/// 
	/// </summary>
	public class Learning {
		NeuralNetwork network;
		public Learning(NeuralNetwork network) {
			this.network=network;
		}
		public void LearnCycle(float[][] samples, float rate) {
			int nSamples=samples.Length;
			network.Rate=rate;
			for( int i=0; i<nSamples; i++ ) {
				network.Sample=samples[i];
				network.Forward();
				network.Backward();
			}
		}
	}
}
