import numpy as np
import matplotlib.pyplot as plt


# prac01
class AMSTL:
    """Find Armstrong (narcissistic) numbers.

    An n-digit number is an Armstrong number when the sum of its digits,
    each raised to the n-th power, equals the number itself —
    e.g. 153 = 1**3 + 5**3 + 3**3 and 1634 = 1**4 + 6**4 + 3**4 + 4**4.
    """

    def isamstl(self, x):
        """Return True if the non-negative integer x is an Armstrong number.

        Bug fix: the exponent was hard-coded to 3, which is only correct for
        3-digit numbers, while result() iterates over n-digit ranges for any
        n. The exponent now equals the digit count of x.
        """
        digits = len(str(x))
        total = 0
        y = x
        while y != 0:
            y, num = divmod(y, 10)
            total += num ** digits
        return total == x

    def result(self):
        """Read a digit count n from stdin and print every n-digit Armstrong number."""
        n = int(input())
        for i in range(10 ** (n - 1), 10 ** n):
            if self.isamstl(i):
                print(i)


# prac02
class Stack:
    """A LIFO stack backed by a Python list (end of list = top of stack)."""

    def __init__(self):
        self.__top = 0          # number of elements currently stored
        self.__mylist = list()  # underlying storage

    def isempty(self):
        """Return True when the stack holds no elements."""
        return self.__top == 0

    def push(self, x):
        """Place x on top of the stack."""
        self.__mylist.append(x)
        self.__top = self.__top + 1

    def pop(self):
        """Remove the top element; no-op on an empty stack."""
        if not self.isempty():
            self.__mylist.pop()
            self.__top = self.__top - 1

    def top(self):
        """Return the top element without removing it, or None when empty.

        Bug fix: the original indexed __mylist[__top], one past the last
        element, which raised IndexError for every non-empty stack.
        """
        if not self.isempty():
            return self.__mylist[self.__top - 1]

    def len(self):
        """Return the number of stored elements."""
        return self.__top

    def iterator(self):
        """Print each element from bottom to top, one per line.

        Bug fix: the original printed __mylist[__top] (out of range) on
        every iteration instead of __mylist[i].
        """
        for i in range(0, self.__top):
            print(self.__mylist[i])


# prac03
mat = np.random.randint(0, 10, (4, 6))  # random 4x6 integer matrix, values in [0, 10)
print(mat)

row = np.random.randint(0, 10, (1, 6))  # random 1x6 row, broadcast over mat's rows
print(row)

total = mat + row  # broadcasting adds the row to each of the 4 rows
print(total)

c = np.reshape(total, (3, 8))  # same 24 elements reinterpreted as 3x8
print(c)

c = np.sort(c)  # sort each row (last axis) in ascending order
print(c)

# prac04
line_data = np.array([1, 2, 3, 4, 5, 6])
sample_ts = np.linspace(0, np.pi * 10, 40)  # 40 samples across five sine periods
sine_data = np.sin(sample_ts)

# Two side-by-side panels: dashed red line with markers, then the sine wave.
plt.subplot(1, 2, 1)
plt.plot(line_data, "r--o")
plt.title("first")

plt.subplot(1, 2, 2)
plt.plot(sine_data)
plt.title("second")

plt.show()

xs = list(range(1, 7))
ys = np.random.rand(6) * 10  # six uniform values scaled to [0, 10)
plt.plot(xs, ys)
plt.show()

plt.scatter(xs, ys)
plt.show()


# prac05
def f(x):
    """Evaluate the quartic polynomial x**4 + 3*x**3 - 7*x**2 + 2*x - 1.

    Works element-wise on numpy arrays as well as on plain scalars.
    """
    quartic = x ** 4
    cubic = 3 * x ** 3
    quadratic = 7 * x ** 2
    linear = 2 * x
    # Summed in the same left-to-right order as the original expression.
    return quartic + cubic - quadratic + linear - 1


# Plot f across [-100, 100] using 2001 evenly spaced samples.
xs = np.linspace(-100, 100, 2001)
ys = f(xs)
plt.plot(xs, ys)
plt.show()

# Constants for the decaying learning-rate schedule t0 / (t + t1).
t0 = 5
t1 = 5000


def LEARNING_RATE(t, t0=5, t1=5000):
    """Return the decaying learning rate t0 / (t + t1) for iteration t.

    Generalized: t0 and t1 can now be passed explicitly instead of being
    read from module globals; the defaults equal the module-level constants,
    so every existing call site behaves exactly as before.
    """
    return t0 / (t + t1)


ITERATIONS = 1000  # number of gradient-descent steps
EPS = 1e-5         # finite-difference step size


# calculate gradient
def cal_grad(x, f):
    """Approximate the derivative of f at x.

    Uses a one-sided (forward) finite difference with step EPS instead of
    a symbolic derivative.
    """
    ahead = f(x + EPS)
    here = f(x)
    return (ahead - here) / EPS


# Gradient descent on f from a fixed starting point of 0.
# (The original comment claimed a random start in [-100, 100], but the
# code always starts at 0 — the comment was corrected to match.)
ext_point = 0
print("initial ext_point:{:.2f}".format(ext_point))
log = []  # trajectory of ext_point, one entry per iteration

# `step` replaces the original loop variable `iter`, which shadowed the builtin.
for step in range(ITERATIONS):
    log.append(ext_point)

    ext_point -= LEARNING_RATE(step) * cal_grad(ext_point, f)
print("ext_point is {:.2f}".format(ext_point))

plt.plot(range(ITERATIONS), log)
# Bug fix: the original never called plt.show() here, so this trajectory
# plot would silently merge into the next section's figure.
plt.show()

# prac06
def linearf(x):
    """Return 3*x plus standard-normal noise (one draw per element of x).

    x must be a 1-D numpy array; x.shape[0] sets the number of noise samples.
    """
    noise = np.random.randn(x.shape[0])
    return 3 * x + noise


# Fit y = a*x + b to noisy samples by gradient descent on squared error.
x = np.linspace(-3, 3, 11)
y = linearf(x)
plt.scatter(x, y)

a = 1
b = 1
times = 10000
learning_rate = 0.001

for _ in range(times):
    # Gradients of sum((a*x + b - y)**2) with respect to a and b.
    residual = a * x + b - y
    da = 2 * (residual * x).sum()
    # Bug fix: the original used 2 * (a*x + b).sum(), omitting "- y".
    # That is the gradient of the wrong quantity and drives b toward 0
    # instead of toward the least-squares intercept.
    db = 2 * residual.sum()

    a = a - learning_rate * da
    b = b - learning_rate * db

final_y = a * x + b
plt.plot(x, final_y)
plt.show()
