# -*- coding: utf-8 -*-
"""
Created on Tue Dec  8 18:47:49 2020

@author: chris
"""
import os
os.environ["KMP_DUPLICATE_LIB_OK"]="TRUE"

import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import matplotlib.pyplot as plt
from torch.autograd import Variable

# Four calibration point pairs used as the training set below:
# each row of dobot_coordinate corresponds to the same-index row of
# image_coordinate. Presumably (x, y) positions of the same physical
# markers seen by the robot (Dobot arm) and by the camera — TODO confirm
# units (robot frame appears to be mm-scale, image frame pixel-scale).
dobot_coordinate = np.array([[190.6171, 8.1419],
                             [185.7231, -7.098],
                             [202.1162, -14.6101],
                             [205.6441, 0.3899]], dtype="float32")

# Pixel coordinates of the corresponding points in the camera image.
image_coordinate = np.array([[715.277, 811.442],
                             [1137.489, 933.282],
                             [1353.463, 600.833],
                             [946.073, 503.555]], dtype="float32")


class Net(nn.Module):
    """Small MLP mapping an ``n_input``-dim vector to ``n_output`` dims.

    Architecture: Linear -> ReLU -> Linear -> sigmoid -> Linear.
    Used below to regress robot-arm coordinates from image coordinates.
    """

    def __init__(self, n_input, n_hidden, n_output):
        super(Net, self).__init__()
        # Attribute names are kept stable so state_dict keys don't change.
        self.hidden1 = nn.Linear(n_input, n_hidden)
        self.hidden2 = nn.Linear(n_hidden, n_hidden)
        self.predict = nn.Linear(n_hidden, n_output)

    def forward(self, input):
        h = F.relu(self.hidden1(input))
        h = torch.sigmoid(self.hidden2(h))
        return self.predict(h)


# Training data: predict robot coordinates (y) from image coordinates (x).
x = torch.from_numpy(image_coordinate)
y = torch.from_numpy(dobot_coordinate)


# 2 inputs (pixel x/y) -> 100 hidden units -> 2 outputs (robot x/y).
net = Net(2, 100, 2)

# Very small learning rate because the raw pixel inputs are ~1e3 in
# magnitude and are fed in unnormalized.
optimizer = torch.optim.SGD(net.parameters(), lr=0.00001)
loss_func = torch.nn.MSELoss()
loss_list = []  # per-iteration loss, plotted after training
n = 100000
for t in range(n):
    prediction = net(x)
    loss = loss_func(prediction, y)
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
    loss_list.append(loss.item())
    # Progress print every 10000 iterations.
    # (Was `t % 9999 == 0`, which fires at t=0, 9999, 19998, ... — the
    # interval drifted off the intended round-number schedule.)
    if t % 10000 == 0:
        print(loss.item())


# Plot the training-loss curve recorded above.
num = np.arange(0, n)
# Fixed placeholder labels ("Matplotlib demo" / "x axis caption" /
# "y axis caption") — the plot shows MSE loss vs. iteration.
plt.title("Training loss (MSE)")
plt.xlabel("iteration")
plt.ylabel("loss")
plt.plot(num, loss_list)
plt.show()



