#include"head.h"

using namespace std;

// Global coefficient shared by the network; presumably the learning rate
// ("Alpha") — TODO confirm with the code in head.h / the training loop.
// Note: zero-initialized only because it has static storage duration.
LD Alfpha;

struct nero {
	struct connectF {
		LD weight;
		LD bight;
		nero* pointTo;

		connectF():weight(0),bight(0),pointTo(nullptr){}
	};

	struct connectB {
		nero* from;
		size_t label;
	};

	vector<connectF> weights;
	vector<connectB> sighn;
	vector<LD> buffer;
	size_t layer;

	nero(size_t input_size, size_t output_branch, vector<nero>& nextLayer, size_t Layer) {
		weights.resize(output_branch);
		buffer.resize(input_size, 0);
		layer = Layer;

		

		for (int i = 0; i < output_branch; i++) {
			weights[i].weight = getrandom();
			weights[i].pointTo = &nextLayer[i];
		}
	}

	nero(size_t input_size, vector<connectF>& weightS) {
		weights = weightS;;
		buffer.resize(input_size, 0);
	}

	LD activate(LD x) {
		return sigmoid(x);
	}

	void sum(vector<LD>& inbuf, connectF& inweight, connectB& From) {
		for (int i = 0; i < buffer.size(); i++) {
			buffer[i] += activate(inbuf[i] * inweight.weight + inweight.bight);
		}
		sighn.push_back(From);
	}

	void forward() {
		connectB temp;

		for (size_t i = 0; i < buffer.size(); i++) buffer[i] /= sighn.size();

		for (size_t i = 0; i < weights.size(); i++) {
			temp.from = this;
			temp.label = i;

			(weights[i].pointTo)->sum(buffer, weights[i], temp);
		}
	}

	nero* backward(size_t rqLayer, size_t rqLable, LD& rq, LD& current, bool isweight) {
		if (rqLayer == layer) {
			rq = isweight ? weights[rqLable].weight : weights[rqLable].bight;
			return this;
		}


	}


};