library(Matrix)
library(matrixcalc)
In this problem, we'll verify in R that the SVD and the eigendecomposition are related. Given the 2 × 3 matrix \[\mathbf{A} = \left[\begin{array}{rrr} 1 & 2 & 3 \\ -1 & 0 & 4 \end{array}\right] \] write code in R to compute \(X = A^TA\) and \(Y = AA^T\). Then, compute the eigenvalues and eigenvectors of \(X\) and \(Y\) using the built-in commands in R.
A = matrix(c(1,2,3,-1,0,4), nrow = 2, byrow = TRUE)
X = t(A)%*%A
X
## [,1] [,2] [,3]
## [1,] 2 2 -1
## [2,] 2 4 6
## [3,] -1 6 25
Y = A%*%t(A)
Y
## [,1] [,2]
## [1,] 14 11
## [2,] 11 17
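Both \(X\) and \(Y\) are Gram matrices, hence symmetric and positive semidefinite, which is why their eigenvalues are real and non-negative. As a quick check beyond the assignment, both calls below should return TRUE. (As a side note, crossprod(A) and tcrossprod(A) are the idiomatic base-R ways to compute \(A^TA\) and \(AA^T\).)
isSymmetric(X)
isSymmetric(Y)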
eX = eigen(X)
eX
## eigen() decomposition
## $values
## [1] 2.660180e+01 4.398198e+00 1.058982e-16
##
## $vectors
## [,1] [,2] [,3]
## [1,] -0.01856629 -0.6727903 0.7396003
## [2,] 0.25499937 -0.7184510 -0.6471502
## [3,] 0.96676296 0.1765824 0.1849001
eY = eigen(Y)
eY
## eigen() decomposition
## $values
## [1] 26.601802 4.398198
##
## $vectors
## [,1] [,2]
## [1,] 0.6576043 -0.7533635
## [2,] 0.7533635 0.6576043
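As a sanity check beyond the assignment, we can confirm these are genuine eigenpairs by verifying \(Xv = \lambda v\) in matrix form; both comparisons should return TRUE.
# X %*% V should equal V %*% diag(lambda); all.equal tolerates floating-point noise
all.equal(X %*% eX$vectors, eX$vectors %*% diag(eX$values))
all.equal(Y %*% eY$vectors, eY$vectors %*% diag(eY$values))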
Then, compute the left-singular vectors, singular values, and right-singular vectors of \(A\) using the svd command. Examine the two sets of singular vectors and show that they are indeed eigenvectors of \(X\) and \(Y\). In addition, show that the two non-zero eigenvalues of \(X\) and \(Y\) (the third eigenvalue of \(X\) will be zero, or numerically very close to it) are the same, and are the squares of the non-zero singular values of \(A\).
svd(A)
## $d
## [1] 5.157693 2.097188
##
## $u
## [,1] [,2]
## [1,] -0.6576043 -0.7533635
## [2,] -0.7533635 0.6576043
##
## $v
## [,1] [,2]
## [1,] 0.01856629 -0.6727903
## [2,] -0.25499937 -0.7184510
## [3,] -0.96676296 0.1765824
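Before comparing the vector sets, one more optional check: the three SVD factors should reconstruct \(A\). The comparison below should return TRUE.
s = svd(A)
# U D V^T reproduces A up to floating-point error
all.equal(A, s$u %*% diag(s$d) %*% t(s$v))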
# Compare: do the left-singular vectors (u) of A equal the eigenvectors of Y?
svd(A)$u == eY$vectors
## [,1] [,2]
## [1,] FALSE TRUE
## [2,] FALSE TRUE
# Singular vectors and eigenvectors are unique only up to sign,
# so flip the first column's sign before comparing
UL = svd(A)$u
UL[,1] = -UL[,1]
round(UL,digits=5) == round(eY$vectors,digits=5)
## [,1] [,2]
## [1,] TRUE TRUE
## [2,] TRUE TRUE
# Compare: do the right-singular vectors (v) of A equal the eigenvectors of X?
# (element-wise floating-point comparison; the first column differs in sign)
svd(A)$v == eX$vectors[, 1:2]
## [,1] [,2]
## [1,] FALSE FALSE
## [2,] FALSE FALSE
## [3,] FALSE FALSE
VL = svd(A)$v
VL[,1] = -VL[,1]
round(VL, digits = 3) == round(eX$vectors[, 1:2], digits = 3)
## [,1] [,2]
## [1,] TRUE TRUE
## [2,] TRUE TRUE
## [3,] TRUE TRUE
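Since every column is determined only up to sign, a sign-agnostic alternative to flipping columns by hand is to compare magnitudes; every entry below should be TRUE.
# Compare absolute values so the arbitrary column signs cannot interfere
round(abs(svd(A)$u), digits = 5) == round(abs(eY$vectors), digits = 5)
round(abs(svd(A)$v), digits = 5) == round(abs(eX$vectors[, 1:2]), digits = 5)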
# Check: are the non-zero eigenvalues of X and Y the squares of the non-zero singular values of A?
DEV = svd(A)$d^2
DEV
## [1] 26.601802 4.398198
round(DEV, digits = 5) == round(eX$values[1:2], digits = 5)
## [1] TRUE TRUE
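The non-zero eigenvalues of \(X\) and \(Y\) can also be compared against each other directly; both entries should be TRUE.
# X (3 x 3) and Y (2 x 2) share the same two non-zero eigenvalues
round(eX$values[1:2], digits = 5) == round(eY$values, digits = 5)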
The eigenvectors of \(X = A^TA\) are the right-singular vectors (v) of \(A\), and the eigenvectors of \(Y = AA^T\) are the left-singular vectors (u), in each case up to sign. The shared non-zero eigenvalues are the squares of the singular values of \(A\).
Using the procedure outlined in section 1 of the weekly handout, write a function to compute the inverse of a well-conditioned full-rank square matrix using co-factors. In order to compute the co-factors, you may use built-in commands to compute the determinant. Your function should have the following signature: \(B = myinverse(A)\)
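For reference, the function below implements the classical adjugate formula: if \(M_{ij}\) is the minor of \(a_{ij}\) (the determinant of \(A\) with row \(i\) and column \(j\) deleted) and \(C_{ij} = (-1)^{i+j}M_{ij}\) is the corresponding co-factor, then \[\mathbf{A}^{-1} = \frac{1}{\det(\mathbf{A})}\,\mathbf{C}^{T}.\]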
myinverse = function(A){
  # Guard clauses: the cofactor method needs a square, non-singular matrix
  if (nrow(A) != ncol(A)) { stop("Matrix is not square") }
  if (det(A) == 0) { stop("Matrix is singular") }
  # Build the cofactor matrix: coMatrix[i,j] = (-1)^(i+j) * minor(i,j)
  coMatrix = matrix(0, nrow(A), ncol(A))
  for (i in 1:nrow(A)) {
    for (j in 1:ncol(A)) {
      # drop = FALSE keeps the minor a matrix even when it is 1 x 1
      coMatrix[i,j] = det(A[-i, -j, drop = FALSE]) * (-1)^(i+j)
    }
  }
  # A^{-1} = adjugate / determinant = t(cofactor matrix) / det(A)
  return(t(coMatrix) / det(A))
}
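As a quick hand-checkable test beyond the assignment, a diagonal matrix inverts entry-by-entry, so the result is easy to verify by eye. (This 2 × 2 case also exercises the drop = FALSE guard above, since its minors are 1 × 1.)
# diag(c(2, 4)) should invert to diag(c(0.5, 0.25))
myinverse(diag(c(2, 4)))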
A = matrix(c(1,2,4,3,4,3,3,1,1,3,1,8,2,1,7,1),nrow=4)
A
## [,1] [,2] [,3] [,4]
## [1,] 1 4 1 2
## [2,] 2 3 3 1
## [3,] 4 3 1 7
## [4,] 3 1 8 1
B = myinverse(A)
B
## [,1] [,2] [,3] [,4]
## [1,] -0.9285714 1.2142857 0.14285714 -0.35714286
## [2,] 0.2142857 0.1428571 -0.07142857 -0.07142857
## [3,] 0.2714286 -0.3857143 -0.05714286 0.24285714
## [4,] 0.4000000 -0.7000000 0.10000000 0.20000000
# Verify that A %*% B gives the identity matrix (rounded to absorb floating-point error)
I = round(A %*% B)
I
## [,1] [,2] [,3] [,4]
## [1,] 1 0 0 0
## [2,] 0 1 0 0
## [3,] 0 0 1 0
## [4,] 0 0 0 1
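The inverse should also work from the left; all.equal should return TRUE here.
# B %*% A should equal the 4 x 4 identity up to floating-point error
all.equal(diag(4), B %*% A)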
# Cross-check against R's built-in inverse
C = solve(A)
round(B,4)==round(C,4)
## [,1] [,2] [,3] [,4]
## [1,] TRUE TRUE TRUE TRUE
## [2,] TRUE TRUE TRUE TRUE
## [3,] TRUE TRUE TRUE TRUE
## [4,] TRUE TRUE TRUE TRUE
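Finally, a short sketch exercising the two guard clauses (this assumes the stop()-based error handling used in myinverse above); both calls should raise errors rather than return a matrix.
# Non-square input
tryCatch(myinverse(matrix(1:6, nrow = 2)), error = function(e) conditionMessage(e))
# Singular input: an all-ones matrix has determinant 0
tryCatch(myinverse(matrix(1, nrow = 3, ncol = 3)), error = function(e) conditionMessage(e))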