package com.example.harmonet.harmtorch;

/**
 * Rectified Linear Unit (ReLU) activation layer.
 *
 * <p>Applies the element-wise function {@code f(x) = x for x >= 0, else 0}
 * to its input tensor. ReLU has no trainable parameters, so {@link #getParam()}
 * reports zero and {@link #init(float[])} is a no-op.
 */
public class Relu implements Layer {

    /**
     * Applies ReLU element-wise to {@code in}.
     *
     * @param in input tensor; read-only — its contents are not modified
     * @return a new tensor of the same dimensions where every negative
     *         element has been replaced by {@code 0}
     */
    @Override
    public Tensor forward(Tensor in) {
        Tensor activated = new Tensor(in._dim);
        int length = in._tensor.length;
        // Keep the original `>= 0` comparison (rather than Math.max) so that
        // -0.0 passes through unchanged, exactly as before.
        for (int idx = 0; idx < length; idx++) {
            activated._tensor[idx] = in._tensor[idx] >= 0 ? in._tensor[idx] : 0;
        }
        return activated;
    }

    /**
     * @return {@code 0} — ReLU carries no trainable parameters
     */
    @Override
    public int getParam() {
        return 0;
    }

    /**
     * No-op: ReLU has no parameters to initialize.
     *
     * @param param ignored
     */
    @Override
    public void init(float[] param) {
        // Intentionally empty — this layer is parameter-free.
    }
}
