Problem 1:

\(f(x) = \frac{3x^4 - 4x^3}{12}\), \(f'(x) = x^3 - x^2\), \(f''(x) = 3x^2 - 2x\)

\(x_{k + 1} = x_k - \frac{x_k^3 - x_k^2}{3x_k^2 - 2x_k}\)
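
For example, starting from \(x_0 = 2\), one step gives \(x_1 = 2 - \frac{8 - 4}{12 - 4} = 1.5\).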

# candidate starting points
step <- -10:10

# one Newton update: x - f'(x)/f''(x)
newton <- function(x) {
  x - (x^3 - x^2)/(3*x^2 - 2*x)
}

# apply a single Newton step to each starting point
# (x = 0 gives 0/0 = NaN because f''(0) = 0)
for (n in step) {
  result <- round(newton(n), 3)
  print(result)
}
## [1] -6.562
## [1] -5.897
## [1] -5.231
## [1] -4.565
## [1] -3.9
## [1] -3.235
## [1] -2.571
## [1] -1.909
## [1] -1.25
## [1] -0.6
## [1] NaN
## [1] 1
## [1] 1.5
## [1] 2.143
## [1] 2.8
## [1] 3.462
## [1] 4.125
## [1] 4.789
## [1] 5.455
## [1] 6.12
## [1] 6.786
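
A single update from each starting point is shown above; to actually reach the minimizer, the update has to be iterated. A minimal sketch reusing newton() from above (the tolerance, iteration cap, and starting point are illustrative choices):

newton_iterate <- function(x0, tol = 1e-8, max_iter = 100) {
  x <- x0
  for (i in seq_len(max_iter)) {
    x_new <- newton(x)
    if (abs(x_new - x) < tol) break  # stop once the updates stall
    x <- x_new
  }
  x
}
newton_iterate(2)  # converges to the minimizer x = 1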

Problem 2

# f(x) = (3x^4 - 4x^3)/12, the objective from Problem 1
input_func <- function(x) {
  (3*x^4 - 4*x^3) / 12
}

# minimize over [-10, 10] with a coarse tolerance
result <- optimize(input_func, interval = c(-10, 10), tol = 0.01)
result
## $minimum
## [1] 1.000417
## 
## $objective
## [1] -0.08333325
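
This matches the analytic answer: \(f'(x) = x^2(x - 1)\) vanishes at \(x = 1\), where \(f(1) = -\frac{1}{12} \approx -0.0833\). As a quick sanity check:

input_func(1)  # equals -1/12, agreeing with optimize() up to its tolerance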

Problem 3

I’m going to use Newton’s method with a backtracking (Armijo) line search; the objective is the Rosenbrock function \(f(x, y) = (x - 1)^2 + 100(y - x^2)^2\).

# define the function (Rosenbrock)
f <- function(x, y) {
  (x - 1)^2 + 100*(y - x^2)^2
}

# define the gradient
grad_f <- function(x, y) {
  c(2*(x - 1) - 400*x*(y - x^2), 200*(y - x^2))
}

# define the Hessian matrix
# (d2f/dx2 = 2 - 400*(y - x^2) + 800*x^2; the cross term is -400*x)
hess_f <- function(x, y) {
  matrix(c(2 - 400*(y - x^2) + 800*x^2, -400*x, -400*x, 200),
         nrow = 2, ncol = 2)
}
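
As a quick check on the hand-coded derivatives (a minimal sketch; the test point and step size are arbitrary choices), the gradient can be compared against central finite differences:

# central-difference approximation of the gradient at (x, y)
num_grad <- function(x, y, h = 1e-6) {
  c((f(x + h, y) - f(x - h, y)) / (2*h),
    (f(x, y + h) - f(x, y - h)) / (2*h))
}
num_grad(0.5, 0.5)  # should closely match grad_f(0.5, 0.5)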

# starting point and iteration counter
x <- c(0, 0)
iter <- 0


while (iter < 100) {

  grad <- grad_f(x[1], x[2])
  hess <- hess_f(x[1], x[2])

  # Newton direction: solve H d = -grad
  d <- -solve(hess) %*% grad

  # backtracking line search (Armijo sufficient-decrease condition, c = 0.5)
  alpha <- 1
  while (f(x[1] + alpha*d[1], x[2] + alpha*d[2]) >
         f(x[1], x[2]) + 0.5*alpha*t(grad) %*% d) {
    alpha <- alpha/2
  }

  x_new <- x + alpha*d

  # stop once the improvement in f is negligible
  if (abs(f(x_new[1], x_new[2]) - f(x[1], x[2])) < 1e-6) {
    x <- x_new
    break
  }

  iter <- iter + 1
  x <- x_new
}

print(f(x[1], x[2]))
## [1] 0.03901471
print(round(x, 3))
##       [,1]
## [1,] 0.802
## [2,] 0.644
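
For reference, the exact minimum of this function is at \((1, 1)\), where it equals 0.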

Problem 4

I couldn’t get this to work at first: optim() needs a starting vector with one entry per coordinate, so par = 2 leaves x[2] undefined inside the objective. Passing a length-2 par fixes it.

my_func <- function(x) {
  (x[1] - 1)^2 + 100*(x[2] - x[1]^2)^2  # the same objective as Problem 3
}


library(optimr)  # note: optim() itself comes from base R's stats package
optim(par = c(0, 0), fn = my_func, method = "L-BFGS-B",
      lower = -10, upper = 10)
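
With a length-2 starting vector, L-BFGS-B should converge near the minimizer \((1, 1)\) with an objective value close to 0.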