package com.example.harmonet.harmtorch;

/**
 * Rectified Linear Unit (ReLU) activation layer: applies f(x) = max(0, x)
 * element-wise to the input tensor.
 *
 * <p>This layer is stateless and has no trainable parameters, so
 * {@link #getParam()} reports 0 and {@link #init(float[])} is a no-op.
 */
public class Relu implements Layer {

    /**
     * Applies ReLU element-wise and returns the result in a new tensor.
     *
     * @param in input tensor; its backing array is read but not modified
     * @return a freshly allocated tensor of the same dimensions where every
     *         negative element has been replaced by 0
     */
    @Override
    public Tensor forward(Tensor in) {
        // Output tensor with the same dimensions as the input
        // (assumes Tensor(dim) allocates a same-sized backing array — consistent
        // with the original code indexing out.tensor() by in's length).
        Tensor out = new Tensor(in.dim());
        // Hoist the backing arrays out of the loop instead of calling the
        // tensor() accessor three times per iteration.
        float[] src = in.tensor();
        float[] dst = out.tensor();
        for (int i = 0; i < src.length; ++i) {
            dst[i] = Math.max(0f, src[i]);
        }
        return out;
    }

    /** @return 0 — ReLU has no trainable parameters. */
    @Override
    public int getParam() {
        return 0;
    }

    /** No-op: there are no parameters to initialize. */
    @Override
    public void init(float[] param) {
    }
}
