Problem Set 1

Given a 2 x 3 matrix A, compute \(\mathbf{X} = \mathbf{A}\mathbf{A}^T\) and \(\mathbf{Y} = \mathbf{A}^T\mathbf{A}\).

\[\mathbf{A} = \left[\begin{array} {rrr} 1 & 2 & 3 \\ -1 & 0 & 4 \\ \end{array}\right] \]

A <- matrix(c(1, 2, 3, -1, 0, 4), nrow = 2, ncol = 3, byrow = TRUE)

A
##      [,1] [,2] [,3]
## [1,]    1    2    3
## [2,]   -1    0    4

Computing X

X <- A %*% t(A)

X
##      [,1] [,2]
## [1,]   14   11
## [2,]   11   17
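
As a hand check with the entries of A above, the (1,1) entry of X is 1(1) + 2(2) + 3(3) = 14, and the full product agrees with the R output:

\[\mathbf{X} = \mathbf{A}\mathbf{A}^T = \left[\begin{array} {rrr} 1 & 2 & 3 \\ -1 & 0 & 4 \\ \end{array}\right] \left[\begin{array} {rr} 1 & -1 \\ 2 & 0 \\ 3 & 4 \\ \end{array}\right] = \left[\begin{array} {rr} 14 & 11 \\ 11 & 17 \\ \end{array}\right] \]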

Computing Y

Y <- t(A) %*% A

Y
##      [,1] [,2] [,3]
## [1,]    2    2   -1
## [2,]    2    4    6
## [3,]   -1    6   25

Eigenvalues & Eigenvectors

Compute eigenvalues and eigenvectors of X and Y using the built-in command in R.

valx <- eigen(X)

eigenvalues_x <- valx$values

eigenvalues_x
## [1] 26.601802  4.398198
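
As a quick sanity check, these eigenvalues should sum to the trace of X (14 + 17 = 31) and multiply to its determinant (14(17) - 11(11) = 117). A minimal check in R, using only the objects already defined above; both differences should be (numerically) zero:

sum(eigenvalues_x) - sum(diag(X))
prod(eigenvalues_x) - det(X)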
valy <- eigen(Y)

eigenvalues_y <- valy$values

eigenvalues_y
## [1] 2.660180e+01 4.398198e+00 1.058982e-16
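
The third eigenvalue of Y is zero up to floating-point rounding (on the order of 1e-16): the two rows of A are linearly independent, so A has rank 2, and the 3 x 3 matrix \(\mathbf{Y} = \mathbf{A}^T\mathbf{A}\) therefore also has rank 2 and must have a zero eigenvalue. A quick check with base R's QR decomposition, which should report a rank of 2:

qr(A)$rank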
# eigenvectors of X and Y

eigenvectors_x <- valx$vectors

eigenvectors_x
##           [,1]       [,2]
## [1,] 0.6576043 -0.7533635
## [2,] 0.7533635  0.6576043
eigenvectors_y <- valy$vectors

eigenvectors_y
##             [,1]       [,2]       [,3]
## [1,] -0.01856629 -0.6727903  0.7396003
## [2,]  0.25499937 -0.7184510 -0.6471502
## [3,]  0.96676296  0.1765824  0.1849001
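
A brief verification that these are eigenpairs, i.e. that \(\mathbf{X}\mathbf{V} = \mathbf{V}\Lambda\) (and likewise for Y); both differences below should round to matrices of zeros:

round(X %*% eigenvectors_x - eigenvectors_x %*% diag(eigenvalues_x), 12)
round(Y %*% eigenvectors_y - eigenvectors_y %*% diag(eigenvalues_y), 12)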

SVD

Compute the left-singular vectors, singular values, and right-singular vectors of A using the svd command. Examine the two sets of singular vectors and show that they are indeed eigenvectors of X and Y. In addition, show that the two non-zero eigenvalues of X and Y are the same and are the squares of the non-zero singular values of A.

I have calculated each below in the following order: singular values, left-singular vectors, right-singular vectors.

svd_A <- svd(A)

svd_A$d
## [1] 5.157693 2.097188
svd_A$u
##            [,1]       [,2]
## [1,] -0.6576043 -0.7533635
## [2,] -0.7533635  0.6576043
svd_A$v
##             [,1]       [,2]
## [1,]  0.01856629 -0.6727903
## [2,] -0.25499937 -0.7184510
## [3,] -0.96676296  0.1765824
# store the SVD components as variables
single <- svd_A$d
left <- svd_A$u
right <- svd_A$v

# compare with the eigenvectors from the earlier exercise (singular vectors match up to sign)
abs(round(left[, 1], 15)) == abs(round(eigenvectors_x[, 1], 15))
## [1] TRUE TRUE
abs(left[, 2]) == abs(eigenvectors_x[, 2])
## [1] TRUE TRUE
abs(round(right[, 1], 15)) == abs(round(eigenvectors_y[, 1], 15))
## [1] TRUE TRUE TRUE
round(right[, 2], 14) == round(eigenvectors_y[, 2], 14)
## [1] TRUE TRUE TRUE
# the two non-zero eigenvalues from the earlier exercise are indeed the squares of the non-zero singular values

single^2
## [1] 26.601802  4.398198
round(single^2, 14) == round(eigenvalues_x, 14)
## [1] TRUE TRUE
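
As one more check, the three SVD factors should reconstruct A itself, since \(\mathbf{A} = \mathbf{U}\mathbf{D}\mathbf{V}^T\); the difference below should round to a matrix of zeros:

round(left %*% diag(single) %*% t(right) - A, 12)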

Problem Set 2

Below is a function, myinverse, that computes the inverse of a well-conditioned, full-rank square matrix using cofactors.
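
For reference, this is the cofactor (adjugate) formula the function implements: each cofactor \(C_{ij}\) is the signed determinant of the minor \(M_{ij}\) obtained by deleting row i and column j, and the inverse is the transposed cofactor matrix divided by the determinant:

\[C_{ij} = (-1)^{i+j} M_{ij}, \qquad \mathbf{A}^{-1} = \frac{1}{\det(\mathbf{A})}\,\mathbf{C}^T \]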

myinverse <- function(z) {

  row_count <- nrow(z)
  column_count <- ncol(z)

  # the input must be a square matrix
  if (row_count != column_count) {
    stop("The input matrix is not square. The number of columns and rows must match.")
  }

  # square matrix of zeros to hold the cofactors
  cfactor <- matrix(0, row_count, column_count)

  for (i in seq_len(row_count)) {
    for (x in seq_len(column_count)) {
      # cofactor = signed determinant (built-in det) of the minor obtained by
      # deleting row i and column x; drop = FALSE keeps 1 x 1 minors as matrices
      cfactor[i, x] <- det(z[-i, -x, drop = FALSE]) * (-1)^(i + x)
    }
  }

  # the inverse is the transposed cofactor matrix (the adjugate) divided by det(z)
  inverse <- t(cfactor) / det(z)

  return(inverse)
}
a <- matrix(c(0, 0, 1, 2, -1, 3, 1, 1, 4), nrow = 3, ncol = 3, byrow = TRUE)
a
##      [,1] [,2] [,3]
## [1,]    0    0    1
## [2,]    2   -1    3
## [3,]    1    1    4
B <- myinverse(a)
B
##           [,1]       [,2]      [,3]
## [1,] -2.333333  0.3333333 0.3333333
## [2,] -1.666667 -0.3333333 0.6666667
## [3,]  1.000000  0.0000000 0.0000000
I <- a %*% B
I
##               [,1] [,2] [,3]
## [1,]  1.000000e+00    0    0
## [2,]  0.000000e+00    1    0
## [3,] -8.881784e-16    0    1
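
As a final (optional) sanity check, the result can be compared against base R's built-in solve(); the difference below should round to a matrix of zeros, and rounding I should give the 3 x 3 identity matrix:

round(B - solve(a), 12)
round(I)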