package com.example.nn_app;

import com.example.nn_app.slice.MainAbilitySlice;
import ohos.aafwk.ability.Ability;
import ohos.aafwk.content.Intent;

import java.io.FileInputStream;
import java.util.ArrayList;
import java.util.Arrays;

public class MainAbility extends Ability {
    @Override
    public void onStart(Intent intent) {
        super.onStart(intent);
        super.setMainRoute(MainAbilitySlice.class.getName());
        // Build a small MLP for 28x28 (MNIST-sized) flattened inputs.
        NN net = new NN(
                new Linear(28 * 28, 512),
                new Relu(),
                new Linear(512, 128),
                new Relu(),
                new Linear(128, 10),
                new Softmax()
        );
        // NOTE(review): readTorch is currently a stub and the path is empty —
        // confirm where the model weights are meant to come from.
        net.readTorch("");
        // Bug fix: inference(Tensor) requires an input tensor; the original
        // call `net.inference()` did not compile. Feed a zero tensor of the
        // network's input size as a placeholder.
        net.inference(new Tensor(28 * 28));
    }
}

/**
 * A minimal dense tensor: a flat float buffer plus its dimension sizes.
 */
class Tensor {
    float[] _tensor;   // flattened element storage (row-major by convention)
    int[] _dim;        // size of each dimension

    /**
     * Allocates a zero-filled tensor whose element count is the product of
     * all given dimension sizes (1 when no dimensions are given).
     */
    Tensor(int... dims) {
        int size = 1;
        for (int d : dims) {
            size *= d;
        }
        _tensor = new float[size];
        _dim = dims.clone();
    }

    /**
     * Returns an independent deep copy of this tensor; despite the name,
     * this copies the data rather than aliasing it.
     */
    Tensor view() {
        Tensor copy = new Tensor(_dim);
        copy._tensor = _tensor.clone();
        return copy;
    }
}

/**
 * One stage of the network. Implementations in this file (Linear, Relu,
 * Softmax) each allocate and return a new Tensor rather than writing
 * into {@code in}.
 */
interface Layer {
    /** Applies this layer to the input and returns the resulting tensor. */
    Tensor forward(Tensor in);
}

/**
 * Fully connected (dense) layer: out = W * in + b.
 */
class Linear implements Layer {

    int col, row;        // col = input features, row = output features
    float[][] weight;    // row x col weight matrix, zero-initialized
    float[] bias;        // length-row bias vector, zero-initialized

    Linear(int input, int output) {
        this.col = input;
        this.row = output;
        this.weight = new float[row][col];
        this.bias = new float[row];
    }

    /**
     * Computes W * in + b. Assumes in._tensor has at least {@code col}
     * elements.
     */
    @Override
    public Tensor forward(Tensor in) {
        Tensor out = new Tensor(row);
        out._tensor = bias.clone();  // start from bias, accumulate the product
        for (int i = 0; i < row; ++i) {
            // Bug fix: the inner loop incremented i (`++i`) instead of j,
            // so j never advanced and i ran past `row`.
            for (int j = 0; j < col; ++j) {
                out._tensor[i] += weight[i][j] * in._tensor[j];
            }
        }
        return out;
    }

    /**
     * Loads pretrained parameters.
     * Bug fix: {@code float[][].clone()} is shallow — the row arrays would
     * stay shared with the caller — so the matrix is deep-copied here.
     */
    public void init(float[][] weight, float[] bias) {
        this.weight = new float[weight.length][];
        for (int i = 0; i < weight.length; ++i) {
            this.weight[i] = weight[i].clone();
        }
        this.bias = bias.clone();
    }
}

/**
 * Element-wise rectified linear unit: f(x) = x for x >= 0, else 0.
 */
class Relu implements Layer {

    @Override
    public Tensor forward(Tensor in) {
        Tensor result = new Tensor(in._dim);
        for (int idx = 0; idx < in._tensor.length; ++idx) {
            float v = in._tensor[idx];
            result._tensor[idx] = (v >= 0) ? v : 0f;
        }
        return result;
    }
}

/**
 * Softmax over the flat input: out[i] = exp(in[i]) / sum_j exp(in[j]).
 *
 * Bug fix: the original divided the raw values by their plain sum, which
 * is normalization, not softmax — it produces wrong probabilities and
 * breaks on negative values or a zero sum. The max is subtracted before
 * exponentiating for numerical stability (standard log-sum-exp trick);
 * this does not change the mathematical result.
 */
class Softmax implements Layer {

    @Override
    public Tensor forward(Tensor in) {
        Tensor out = new Tensor(in._dim);
        float max = Float.NEGATIVE_INFINITY;
        for (float v : in._tensor) {
            if (v > max) {
                max = v;
            }
        }
        float sum = 0f;
        for (int i = 0; i < in._tensor.length; ++i) {
            out._tensor[i] = (float) Math.exp(in._tensor[i] - max);
            sum += out._tensor[i];
        }
        for (int i = 0; i < out._tensor.length; ++i) {
            out._tensor[i] /= sum;
        }
        return out;
    }
}

/**
 * Minimal sequential network: an ordered list of layers applied in turn.
 */
class NN {
    // Bug fix: this list was declared but never initialized, so the
    // constructor threw a NullPointerException on addAll.
    ArrayList<Layer> _net = new ArrayList<>();

    NN(Layer... layers) {
        _net.addAll(Arrays.asList(layers));
    }

    /** Stub: load parameters exported from PyTorch. Not yet implemented. */
    void readTorch(String path) {

    }

    /** Stub: load parameters exported from TensorFlow. Not yet implemented. */
    void readTensorflow(String path) {

    }

    /**
     * Runs the input through every layer in order and returns the result.
     * The input tensor is copied first, so it is never modified.
     */
    Tensor inference(Tensor input) {
        Tensor out = input.view();
        for (Layer layer : _net) {
            out = layer.forward(out);
        }
        return out;
    }

    /**
     * Reads a file as consecutive 4-byte big-endian IEEE-754 floats and
     * prints each value.
     *
     * Bug fixes: the stream is now closed via try-with-resources, and an
     * incomplete trailing read (fewer than 4 bytes) is skipped instead of
     * being decoded with stale buffer contents.
     */
    static void readFileByBytes(String fileName) {
        try (FileInputStream in = new FileInputStream(fileName)) {
            byte[] bytes = new byte[4];
            int c;
            while ((c = in.read(bytes)) != -1) {
                if (c < 4) {
                    break;  // incomplete float at EOF — ignore the tail
                }
                String b = byteToArray(bytes);
                // Parse as long first: 8 hex digits can exceed Integer range.
                long ieee754Bits = Long.parseLong(b, 16);
                float realValue = Float.intBitsToFloat((int) ieee754Bits);
                System.out.println(realValue);
            }
        } catch (Exception e) {
            System.out.println(e);
        }
    }

    /** Hex-encodes a byte array, uppercase, two characters per byte. */
    static String byteToArray(byte[] data) {
        // StringBuilder instead of String += in a loop (avoids O(n^2) copies).
        StringBuilder result = new StringBuilder(data.length * 2);
        for (byte b : data) {
            // `| 0x100` guarantees three hex digits so the two low digits
            // always include a leading zero.
            result.append(Integer.toHexString((b & 0xFF) | 0x100).toUpperCase().substring(1, 3));
        }
        return result.toString();
    }
}






