#!/usr/bin/ruby

require "open3"
require "neuronet"
require "scanf"

# One axis of an evaluation grid: iterates from +min+ to +max+ (inclusive)
# in increments of +step+. +cur+ is a mutable cursor, initialized to 0,
# for callers that want to track a position along the axis.
class Component
    attr_reader :max, :min, :step
    attr_accessor :cur

    # @param min  [Numeric] first value yielded by #each
    # @param max  [Numeric] upper bound (inclusive) of the iteration
    # @param step [Numeric] increment between consecutive values
    def initialize(min, max, step)
        @max, @min, @step, @cur = max, min, step, 0
    end

    # Yields min, min+step, min+2*step, ... up to max (inclusive).
    # Returns an Enumerator when no block is given (Ruby convention);
    # the original captured an unused &block and raised LocalJumpError
    # in the block-less case.
    def each
        return enum_for(:each) unless block_given?
        @min.step(@max, @step) { |x| yield(x) }
    end
end

# Manages per-buyer, per-user two-layer perceptrons persisted on disk under
# ./nets2/<buyer_address>/<user_name>. Training samples are obtained by
# querying an external SWI-Prolog program over a 2-D grid; grid evaluation
# results are dumped to ./graphdata/ for plotting.
#
# NOTE(review): class variables (@@...) are shared across the whole
# inheritance tree; class-instance variables would be safer if this class
# is ever subclassed.
class NeuronetManager
    # Legacy default paths — presumably superseded by the nets2/ layout used
    # in the methods below; verify before removing.
    @@prolog = "prolog/prolog5.pl"
    @@net = "nets/5.net30sigmoid"
    @@ts = "ts/all.ts"
    @@result = "results/5.30sigm"

    # Default grid parameters. NOTE(review): these appear unused — initialize
    # assigns instance variables @start_x etc. from its +gi+ argument instead.
    @@teachinglevel = 20
    @@start_x = 0
    @@end_x = 1000
    @@delta_x = 50
    @@start_y = 0
    @@end_y = 100
    @@delta_y = 10

    # Learning rates: initial training of a new user's net vs. incremental
    # teaching of an existing net.
    @@eta_new_user = 1
    @@eta_teach = 0.2

    # How many Gaussian-jittered neighbor points to sample around the query
    # point when activating / teaching.
    @@num_of_dots_around_to_activate = 6
    @@num_of_dots_around_to_teach = 6

    # Training stops once this fraction of samples is within the threshold
    # of the expected output.
    @@percent_of_good_samples_new_user = 0.8
    @@threshold_to_be_good_new_user = 0.3

    # @param tl        training level (stored, not used in visible code)
    # @param tlOneNode training level for a single node (stored, not used here)
    # @param gi        graph interval: [[start_x, delta_x, end_x],
    #                                   [start_y, delta_y, end_y]]
    # Also eagerly loads every saved net from ./nets2/<dir>/<file> into @nets.
    def initialize(tl, tlOneNode, gi)
        @traininglevel = tl
        @oneNodeTrainingLevel = tlOneNode

        @start_x = gi[0][0]
        @delta_x = gi[0][1]
        @end_x = gi[0][2]

        @start_y = gi[1][0]
        @delta_y = gi[1][1]
        @end_y = gi[1][2]

        # Load neural nets: @nets[buyer_address][user_name] => perceptron.
        # NOTE(review): Dir.chdir mutates process-global CWD; an exception
        # mid-loop would leave the process in the wrong directory.
        @nets = {}
        Dir.chdir("./nets2")
        Dir.glob("**").each do |dir|
            @nets[dir] = {}
            Dir.chdir(dir)

            Dir.glob("*").each do |file|
                # 64 inputs = two 32-bit binary-encoded coordinates, 1 output.
                @nets[dir][file] = TwoLayerPerceptron.new(64,1)
                @nets[dir][file].read_from_file(file)
            end

            Dir.chdir("..")
        end
        Dir.chdir("..")
        #p "Nets have been loaded"
    end

    # Evaluates the named net at every interior grid point and writes
    # "x y activation" triples to graphdata/<buyer_address>.<user_name>.
    # The nested per-component loops are assembled as Ruby source in +s+
    # and executed with eval so the dimensionality follows +component+.
    # NOTE(review): eval of generated code — hard to audit. The fragment
    # appended at "file.printf(\"\n\")" is concatenated directly before
    # "end" with no separator; verify the generated source actually parses.
    def countAllDotsInTheNet(buyer_address, user_name)
        file = File.new("graphdata/"+buyer_address+"."+user_name, "w+")

        # Skip the first grid line on each axis (start + delta).
        component = [Component.new(@start_x+@delta_x, @end_x, @delta_x), Component.new(@start_y+@delta_y, @end_y, @delta_y)]

        # Open one nested loop per component: "component[i].each do |ci|".
        s = ""
        0.upto(component.size - 1) do |i| 
            s += "component[#{i}].each do |c#{i}|\n"
        end

        # Loop body: print coordinates and the net's activation for the
        # concatenated 32-bit encodings of each coordinate.
        s += "file.printf(\"%d %d %f\n\","
        0.upto(component.size - 1) do |i| 
            s += "c#{i}, "
        end
        s += "@nets[buyer_address][user_name].activate("

        0.upto(component.size - 1) do |i| 
            s += "to_32bin(c#{i}) + "
        end
        # Drop the trailing " + " before closing the activate(...) call.
        s = s[0..-4]
        s += ")[0])\n"

        # Close the loops; emit a blank line between outer-loop iterations.
        component.each_with_index do |c, i|
            s += "file.printf(\"\n\")" if i > 0
            s+= "end\n"
        end

        eval s
        file.close
        #p "Counting all dot's in the net is done"
    end

    # Generates a training set by running the given Prolog program at every
    # grid point: run(x,y). is piped to swipl, and the sample's target value
    # is 1 if Prolog answers exactly "true.", else 0.
    # Returns an array of [x, y, v] triples.
    # NOTE(review): spawns one swipl process per grid point — slow; and the
    # loop body is again assembled as a string and eval'd.
    def genTrainingSet(prolog)
        component = [Component.new(@start_x,@end_x,@delta_x), Component.new(@start_y,@end_y,@delta_y)]

        array = []
        s = ""
        0.upto(component.size - 1) do |i| 
            s += "component[#{i}].each do |c#{i}|\n"
        end
        s += "xy = []\n"

        0.upto(component.size - 1) do |i| 
            s += "xy += [c#{i}]\n"
        end
            
        s += 
            "
            stdin, stdout, stderr = Open3.popen3(\"swipl -s #{prolog}\");
            stdin.puts \"run("

        0.upto(component.size - 1) do |i| 
            s += "\#{c#{i}},"
        end


        # Drop the trailing comma after the last goal argument.
        s = s[0..-2]
        s +=
            ").\n\"
            stdin.close
            if stdout.readlines === [\"true.\n\"]
                v = 1
            else
                v = 0
            end
            stdout.close
            stderr.close
            array += [[xy[0], xy[1], v]]\n" 
        component.each do |c|
            s+= "end\n"
        end

        #puts s 

        eval s
        #p "End of training set generation"
        array
    end

    # Encodes non-negative integer +n+ as a 32-element array of 0/1 bits,
    # most significant bit first (zero-padded on the left).
    # NOTE(review): a negative n skips the while loop and yields all zeros;
    # callers pass .round'ed coordinates, so confirm they are never negative.
    def to_32bin(n)
        m = 32
        out = Array.new(m)
        while (n > 0)
            out[m - 1] = n % 2
            n /= 2
            m -= 1
        end
        1.upto(m) { |i| out[i - 1] = 0 }
        out
    end 

    # Creates and trains a fresh perceptron for (buyer_address, user_name)
    # from a Prolog-generated training set, saves it under nets2/, and
    # registers it in @nets. Trains until at least
    # @@percent_of_good_samples_new_user of samples activate within
    # @@threshold_to_be_good_new_user of their target.
    def newUser(buyer_address, user_name, prolog)
        ts = genTrainingSet(prolog)
        twp = TwoLayerPerceptron.new(64,1)

        Dir.chdir("nets2");
        # Seed ratio 1/2 so the while condition is entered at least once.
        all = 2 
        good = 1
        while good.to_f / all < @@percent_of_good_samples_new_user do
            all = 0 
            good = 0
            # Pass 1: learn only from samples currently outside the threshold.
            ts.each do |a|   
                if ((activateNet(twp, [a[0], a[1]]) - a[2]).abs > @@threshold_to_be_good_new_user)
                    twp.learn(to_32bin(a[0]) + to_32bin(a[1]), a[2], @@eta_new_user) 
                end
            end
            # Pass 2: re-score every sample to measure the good/all ratio.
            ts.each do |a|   
                good += 1 if ((activateNet(twp, [a[0], a[1]]) - a[2]).abs < @@threshold_to_be_good_new_user)
                all += 1   
            end
            #p good
        end

        # "mkdir -p"-style: ignore the error if the directory already exists.
        # NOTE(review): the bare rescue also swallows permission errors etc.
        begin
            Dir.mkdir(buyer_address)
        rescue
        end

        Dir.chdir("..");
        twp.save_to_file("nets2/"+buyer_address+"/"+user_name)

        @nets[buyer_address][user_name] = twp;

        p "success"
    end

    # Unnormalized 2-D Gaussian-like density centered at (m1, m2) with
    # spread s, evaluated at (x1, x2). Used only as a relative weight in
    # activateNet/activate, so the exact normalization does not matter.
    # NOTE(review): the live formula divides by 4*s*s in the exponent while
    # the commented-out standard form uses 2*s*s — presumably a deliberate
    # widening, but confirm it is not a transcription slip.
    def countProbability(m1, m2, s, x1, x2)
        #(1 / (2*3.1415*s*s)) * Math.exp(-0.5 * ( (x1-m1)*(x1-m1)/(2*s*s) + (x2-m2)*(x2-m2)/(2*s*s) ))
        (1 / (6.283*s*s)) * Math.exp( (-(x1-m1)*(x1-m1) - (x2-m2)*(x2-m2)) / (4*s*s) )
    end

    # Box-Muller transform: returns a pair [y1, y2] of independent samples
    # from the standard normal distribution.
    # NOTE(review): rand may return exactly 0, making Math.log(x1) -Infinity.
    def genNumberWithStandartGaussDistrib
        x1, x2 = rand, rand
        #y1 = Math.sqrt(-2 * Math.log(x1)) * Math.cos(2 * 3.1415 * x2);
        #y2 = Math.sqrt(-2 * Math.log(x1)) * Math.sin(2 * 3.1415 * x2);
        y1 = Math.sqrt(-2 * Math.log(x1)) * Math.cos(6.283 * x2);
        y2 = Math.sqrt(-2 * Math.log(x1)) * Math.sin(6.283 * x2);
        [y1, y2]
    end

    # Returns +number+ samples [y1, y2, prob] drawn from a 2-D Gaussian
    # centered at (m1, m2) with spread sigma; prob is the (scaled) density
    # weight at the sampled point, used for weighted averaging by callers.
    def genNumbersWithGaussDistrib(number, m1, m2, sigma)
        srand
        res = []
        number.times do |i|
            x = genNumberWithStandartGaussDistrib
            x1, x2 = x[0], x[1]
            y1, y2 = sigma * x1 + m1, sigma * x2 + m2

            prob = (1 / (sigma * sigma)) * countProbability(m1, m2, sigma, y1, y2)

            res[i] = [y1, y2, prob]
            #printf("%f %f %f\n", y1, y2, prob)
        end
        res
    end
        
    # Evaluates +net+ at +input+ = [x, y] as a probability-weighted average
    # of activations at Gaussian-jittered points around the input.
    # NOTE(review): near-duplicate of #activate below except for the net
    # lookup — candidates for consolidation.
    def activateNet(net, input)
        n = @@num_of_dots_around_to_activate
        sum = 0
        probsum = 0
        inputGauss = genNumbersWithGaussDistrib(n, input[0], input[1], 1.9)
        n.times do |i|
            inp = [] + to_32bin(inputGauss[i][0].round) + to_32bin(inputGauss[i][1].round)
            sum += net.activate(inp)[0] * inputGauss[i][2]
            probsum += inputGauss[i][2]
        end
        sum /= probsum
    end

    # Same as #activateNet but looks the net up in @nets by buyer/user key.
    def activate(buyer_address, user_name, input)
        n = @@num_of_dots_around_to_activate
        sum = 0
        probsum = 0
        inputGauss = genNumbersWithGaussDistrib(n, input[0], input[1], 1.9)
        n.times do |i|
            inp = [] + to_32bin(inputGauss[i][0].round) + to_32bin(inputGauss[i][1].round)
            sum += @nets[buyer_address][user_name].activate(inp)[0] * inputGauss[i][2]
            probsum += inputGauss[i][2]
        end
        sum /= probsum
    end

    # Incrementally teaches an existing net that +input+ = [x, y] should map
    # to +output+: learns on Gaussian-jittered neighbors of the input until
    # the good/all ratio clears @@percent_of_good_samples_new_user, then
    # persists the net back to nets2/.
    def teach(buyer_address, user_name, input, output)
        n = @@num_of_dots_around_to_teach
        inputGauss = genNumbersWithGaussDistrib(n, input[0], input[1], 1)
        # Seed ratio 1/2 so the loop body runs at least once.
        all = 2 
        good = 1

        while good.to_f / all < @@percent_of_good_samples_new_user do
            all = 0
            good = 0
            n.times do |i|
                inp = [] + to_32bin(inputGauss[i][0].round) + to_32bin(inputGauss[i][1].round)
                @nets[buyer_address][user_name].learn(inp, output, @@eta_teach)
                all += 1
            end

            # NOTE(review): inputGauss[i][1] is not .round'ed here, unlike the
            # learn/activate encodings above — confirm whether intentional.
            n.times do |i|
                good += 1 if ((activateNet(@nets[buyer_address][user_name], [inputGauss[i][0].round, inputGauss[i][1]]) - output).abs < @@threshold_to_be_good_new_user)
            end
            #p good.to_f / all
        end

        @nets[buyer_address][user_name].save_to_file("nets2/" + buyer_address + "/" + user_name);
        p "success"
    end
end

# Entry point.
#   ARGV[0] = training level, ARGV[1] = one-node training level (integers),
#   ARGV[2] = mode: "listen" (command loop on stdin) or "genGraphData".
# Graph intervals are hard-coded as [[start, delta, end], ...] per axis.
nm = NeuronetManager.new(ARGV[0].to_i, ARGV[1].to_i, [[0, 10, 100], [0, 50, 1000]])
#p nm.genTrainingSet("prolog/prolog8.pl")
#nm.genNumbersWithGaussDistrib(500, 50, 100, 1.9)
#nm.newUser("systema@chigorin-laptop", "user_1", "prolog/prolog7.pl")
#nm.newUser("xelax-a1@dev.iu7.bmstu.ru", "user_2", "prolog/prolog5.pl")
#nm.teach("systema@chigorin-laptop", "user_3", [30, 200], 1)
#p nm.activate("systema@chigorin-laptop", "user_3", [90, 0])
#p nm.activate("systema@chigorin-laptop", "user_3", [90, 100])
#p nm.activate("systema@chigorin-laptop", "user_3", [90, 200])
#p nm.activate("systema@chigorin-laptop", "user_3", [90, 300])
#p nm.activate("systema@chigorin-laptop", "user_3", [90, 400])
#p nm.activate("systema@chigorin-laptop", "user_3", [90, 500])
#nm.countAllDotsInTheNet("systema@chigorin-laptop", "user_1")

case ARGV[2]
    when "listen"
        # Command loop: read whitespace-separated tokens from stdin
        # (Kernel#scanf from the scanf library). Commands:
        #   newUser  <buyer> <user> <prolog-file>
        #   activate <buyer> <user> <x> <y>      -> prints the activation
        #   teach    <buyer> <user> <x> <y> <v>
        #   exit
        while(line = scanf("%s"))
            case line[0]
                when "newUser"
                    ba,un,prolog,* = scanf("%s %s %s");
                    nm.newUser(ba, un, prolog)
                when "activate"
                    ba,un,i1,i2,* = scanf("%s %s %d %d");
                    p nm.activate(ba, un, [i1, i2])
                when "teach"
                    ba,un,i1,i2,r1,* = scanf("%s %s %d %d %d");
                    nm.teach(ba, un, [i1, i2], r1);
                when "exit"
                    exit
            end
        end
    when "genGraphData"
        # One-shot: dump the grid activations for ARGV[3]=buyer, ARGV[4]=user.
        p "Generating data"
        nm.countAllDotsInTheNet(ARGV[3], ARGV[4])
        p "done"

else
    abort "Wrong argument"
end
