#!/usr/bin/env python

from math import e, sin, cos
from bernstein import basis_fun_eval, basis_fun_derivative, interpolant

def evaluate_fun(x,a,b,n,beta):
	'''
	Residual F(x) of the ODE at a collocation point x, in implicit form.

	The candidate solution is represented as the Bernstein interpolant
	y(x) = sum_{j=0}^{n} beta[j] * B_{j,n}(x) on [a, b].

	Example problem solved here: y' = y**2 + x on x in [0, 0.9],
	with y(0) = 1 and y(0.9) = 32.725, so F(x) = y' - y**2 - x.

	To solve a different equation, change the returned expression.
	The k-th derivative y^(k) at x is obtained as
	sum_j beta[j] * basis_fun_derivative(k, j, n, a, b, x).
	'''
	# First derivative of the Bernstein interpolant at x.
	dydx = sum(beta[j] * basis_fun_derivative(1, j, n, a, b, x)
	           for j in range(n + 1))

	# Interpolant value y(x).
	y = interpolant(beta, n, a, b, x)

	# Implicit form of the ODE: change this for your own equation.
	return dydx - y*y - x

def evaluate_jac(x,j,a,b,n,beta):
	'''
	Jacobian entry D(i,j) = dF_i / dbeta_j evaluated at collocation point x = x_i.
	The Jacobian is used as the LHS matrix in the Newton iteration.

	With y = sum_j beta[j] * B_{j,n}(x) and F = y' - y**2 - x, differentiating
	F with respect to beta[j] gives B'_{j,n}(x) - 2*y*B_{j,n}(x).

	Change the returned expression to match the linearization of your own
	non-linear system.
	'''
	# Current interpolant value y(x), needed by the linearized term.
	y = interpolant(beta, n, a, b, x)

	# Contributions of basis function j: its derivative and its value at x.
	dB_dx = basis_fun_derivative(1, j, n, a, b, x)
	B = basis_fun_eval(j, n, a, b, x)

	return dB_dx - 2*y*B
