import numpy as np
import matplotlib.pyplot as plt
from random import randint



#python task 1
def armstrong_numbers(n):
    """Return every n-digit Armstrong (narcissistic) number, in increasing order.

    An n-digit number is Armstrong when the sum of the n-th powers of its
    digits equals the number itself (e.g. 153 = 1**3 + 5**3 + 3**3).
    For n == 1 this yields 1..9, matching the original scan range.
    """
    found = []
    for num in range(10 ** (n - 1), 10 ** n):
        digit_power_sum = 0  # renamed: the original shadowed the builtin `sum`
        rest = num
        while rest > 0:
            digit_power_sum += (rest % 10) ** n
            rest //= 10
        if digit_power_sum == num:
            found.append(num)
    return found


if __name__ == "__main__":
    # Same observable behavior as the original inline loop when run as a
    # script: read the digit count (empty prompt), print one match per line.
    for value in armstrong_numbers(int(input(""))):
        print(value)

#python task 2
class stack():
    """LIFO stack backed by a Python list; the list's tail is the top."""

    def __init__(self):
        # Underlying storage for the pushed items.
        self.items = []

    def push(self, item):
        # list.append returns None, so push does too (as in the original).
        return self.items.append(item)

    def pop(self):
        """Remove and return the top item (IndexError when empty)."""
        return self.items.pop()

    def top(self):
        """Return the top item without removing it (IndexError when empty)."""
        return self.items[-1]

    def isEmpty(self):
        """True when the stack holds no items."""
        return not self.items

    def length(self):
        """Number of items currently on the stack."""
        return len(self.items)

    def iterate(self):
        """Print every item bottom-to-top on one line, space separated."""
        for element in self.items:
            print(element, end=" ")

#task 3
grid = np.empty([4, 6], dtype=int)   # uninitialized 4x6 ints (arbitrary contents)
ones_row = np.ones([1, 6], dtype=int)  # a single row of ones
total = grid + ones_row              # the 1x6 row broadcasts over all 4 rows
print(grid)
print(ones_row)
print(total)
total = total.reshape(3, 8)          # same 24 values, re-viewed as 3x8
print(total)
total.sort()                         # in-place ascending sort along each row
print("", total)

#task 4
xs = np.arange(1, 7, 1)                       # x-axis values 1..6
ys = np.array(np.random.randint(50, size=6))  # six random ints in [0, 50)
plt.subplot(1, 2, 1)
plt.plot(xs, ys, marker='o', color='r')
plt.title("plot 1")

plt.subplot(1, 2, 2)
star_colors = np.random.randint(100, size=6)  # one color value per point
plt.scatter(xs, ys, marker='*', c=star_colors)
plt.title("plot 2")
plt.suptitle("scatter")
plt.show()




#task 5 Gradient Descent
def g(x):
    """Quartic test function x^4 + 3x^3 - 7x^2 + 2x - 1 (elementwise on arrays)."""
    # Accumulate term by term in the same left-to-right order as the
    # original one-line expression, so floating-point results are identical.
    value = x ** 4
    value = value + 3 * x ** 3
    value = value - 7 * x ** 2
    value = value + 2 * x
    return value - 1

# Draw g over the search interval so the descent trail below lands on it.
curve_x = np.linspace(-6, 5, 2001)
curve_y = g(curve_x)
plt.plot(curve_x, curve_y)
LEARNING_RATE = 0.01   # step size for each descent update
ITERATIONS = 1000      # fixed number of descent steps
EPS = 1e-5             # finite-difference step for the gradient estimate


def cal_grad(x, g):
    """Estimate g'(x) with a one-sided (forward) finite difference."""
    rise = g(x + EPS) - g(x)
    return rise / EPS

ext_point = -6                       # start the search at the left edge of the plot
print("initial ext_point:{:.2f}".format(ext_point))
trail_x = []                         # points visited, recorded before each update
trail_y = []                         # g at each visited point

for step in range(ITERATIONS):       # `step`: the original `iter` shadowed a builtin
    trail_x.append(ext_point)
    trail_y.append(g(ext_point))
    # Move downhill along the estimated slope.
    ext_point -= LEARNING_RATE * cal_grad(ext_point, g)
print("ext_point is {:.2f}".format(ext_point))
plt.scatter(trail_x, trail_y, marker='o', c='r')
plt.show()




#task 6 min loss
def linearf(x):
    """Noisy line: 3*x plus one standard-normal sample per element of x."""
    noise = np.random.randn(x.shape[0])
    return 3 * x + noise
x = np.linspace(-3, 3, 11)   # 11 evenly spaced sample points
y = linearf(x)               # noisy targets scattered around y = 3x

LEARNING_RATE = 0.01         # descent step size
ITERATIONS = 1000            # number of update rounds
EPS = 1e-5                   # finite-difference step for the gradient helpers

# Random starting guess for the line parameters (b drawn first, then k,
# preserving the original order of the RNG calls).
b = float(randint(-5, 5))
k = float(randint(2, 4))

plt.scatter(x, y)              # the noisy samples
starting_fit = k * x + b
plt.plot(x, starting_fit, c='r')  # initial-guess line in red

print("initial k,b:{:.2f},{:.2f}".format(k, b))
# Mean squared error of the starting guess.  The divisor is the sample
# count (previously a hard-coded 11), so the code keeps working if the
# dataset size changes.
n_samples = len(x)
loss = 0
for xi, yi in zip(x, y):
    loss += ((yi - k * xi - b) ** 2) / n_samples
print("{:.2f}".format(loss))                                                         # calculate the loss
def cal_grad_k(k, b, x, y, eps=1e-5):
    """Forward-difference estimate of d(MSE)/dk for the line fit y ~ k*x + b.

    Nudges k by `eps`, recomputes each squared-error term, and divides the
    accumulated change by `eps`.  `eps` is a new keyword with the module's
    EPS value (1e-5) as default, so existing 4-argument calls are unchanged.
    The per-term divisor is the sample count (previously a hard-coded 11),
    so any dataset length works.
    """
    n = len(x)
    delta = 0.0
    for xi, yi in zip(x, y):
        bumped = ((yi - (k + eps) * xi - b) ** 2) / n
        base = ((yi - k * xi - b) ** 2) / n
        delta += bumped - base
    return delta / eps
def cal_grad_b(k, b, x, y, eps=1e-5):
    """Forward-difference estimate of d(MSE)/db for the line fit y ~ k*x + b.

    Same scheme as cal_grad_k: bump b by `eps` (written as `- b - eps`,
    exactly as the original computed it), re-sum the squared errors, and
    divide the change by `eps`.  `eps` is a new keyword defaulting to the
    module's EPS value (1e-5); the divisor is the sample count instead of
    the old hard-coded 11.
    """
    n = len(x)
    delta = 0.0
    for xi, yi in zip(x, y):
        bumped = ((yi - k * xi - b - eps) ** 2) / n
        base = ((yi - k * xi - b) ** 2) / n
        delta += bumped - base
    return delta / eps
# Begin learning.  NOTE(preserved behavior): the b-gradient of each round is
# evaluated with the already-updated k (semi-sequential update), exactly as
# the original loop did.
for step in range(ITERATIONS):       # `step`: the original `iter` shadowed a builtin
    k -= LEARNING_RATE * cal_grad_k(k, b, x, y)
    b -= LEARNING_RATE * cal_grad_b(k, b, x, y)
plt.plot(x, k * x + b)               # fitted line after training
print("k,b:{:.2f},{:.2f}".format(k, b))

# Recompute the mean squared error to compare against the initial loss;
# divisor derived from the data (previously a hard-coded 11).
n_samples = len(x)
loss = 0
for xi, yi in zip(x, y):
    loss += ((yi - k * xi - b) ** 2) / n_samples
print("{:.2f}".format(loss))
plt.show()
