With the attached data file, build and visualize eigenimagery that accounts for 80% of the variability. Provide full R code and discussion.

Import Libraries

library(doParallel)
## Loading required package: foreach
## Loading required package: iterators
## Loading required package: parallel
library(jpeg)
library(foreach)
library(EBImage)
library(OpenImageR)
## 
## Attaching package: 'OpenImageR'
## The following objects are masked from 'package:EBImage':
## 
##     readImage, writeImage

Use of Graphics

Since we have 17 JPEG images, we set num to 17 and list the first 17 .jpg files in the image directory.

num = 17
files = list.files("C:/Users/tyler/Documents/Tyler Docs/CUNY SPS/Data Science/Fall 2022/DATA 605/Images/",pattern="\\.jpg")[1:num]
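
As a quick sanity check (a sketch, assuming the folder actually contains at least 17 .jpg files), we can confirm how many file names were picked up:

length(files)  # should equal num (17)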

View Shoes Function

height = 1200; width = 2500; scale = 20

plot_jpeg = function(path, add = FALSE) {
  jpg = readJPEG(path, native = TRUE)  # read the file (accepts a path or a raw vector)
  res = dim(jpg)[2:1]                  # resolution as [x, y]
  if (!add)                            # initialize an empty plot area if add == FALSE
    plot(1, 1, xlim = c(1, res[1]), ylim = c(1, res[2]), asp = 1, type = 'n',
         xaxs = 'i', yaxs = 'i', xaxt = 'n', yaxt = 'n', xlab = '', ylab = '', bty = 'n')
  rasterImage(jpg, 1, 1, res[1], res[2])
}
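
For example, the helper can display a single shoe image directly from disk (a sketch, assuming the same image directory used above):

plot_jpeg(paste0("C:/Users/tyler/Documents/Tyler Docs/CUNY SPS/Data Science/Fall 2022/DATA 605/Images/", files[1]))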

Load the Data into an Array

We read each image, resize it to 60 x 125 pixels (height/scale by width/scale), and store the result in a four-dimensional array: one slice per image, with a third dimension for the rows and columns of pixels and a fourth for the three colour channels.

im = array(rep(0, length(files) * height/scale * width/scale * 3),
           dim = c(length(files), height/scale, width/scale, 3))

for (i in 1:num){
  temp = resize(readJPEG(paste0("C:/Users/tyler/Documents/Tyler Docs/CUNY SPS/Data Science/Fall 2022/DATA 605/Images/", files[i])),
                height/scale, width/scale)
  im[i,,,] = array(temp, dim = c(1, height/scale, width/scale, 3))
}
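
With scale = 20, every image is reduced to 60 x 125 pixels, so the array should come out as 17 x 60 x 125 x 3; a quick check:

dim(im)  # expected: 17 60 125 3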

Vectorize

We flatten each resized image into a single row vector by concatenating its red, green, and blue channels, then transpose the result so that each column of shoes corresponds to one image.

flat = matrix(0, num, prod(dim(im)[2:4]))  # one row per image, one column per pixel value
for (i in 1:num) {
  r = as.vector(im[i,,,1]); g = as.vector(im[i,,,2]); b = as.vector(im[i,,,3])
  flat[i,] <- c(r, g, b)
}
shoes=as.data.frame(t(flat))
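
Each image is now a single column of 60 * 125 * 3 = 22500 values; a quick check of the resulting dimensions:

dim(shoes)  # expected: 22500 rows (pixels x channels), 17 columns (images)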

Actual Plot

We can display the 17 resized images as a grid of thumbnails.

par(mfrow=c(3,3))   # 3 x 3 grid; with 17 images this spans two plotting pages
par(mai=c(.3,.3,.3,.3))
for (i in 1:num){
  plot_jpeg(writeJPEG(im[i,,,]))  # re-encode the resized array and plot it
}

Eigencomponents from Correlation Structure

scaled = scale(shoes, center = TRUE, scale = TRUE)  # standardize each image (column)
mean.shoe = attr(scaled, "scaled:center")           # per-image means
std.shoe = attr(scaled, "scaled:scale")             # per-image standard deviations
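
A quick check (a sketch, assuming scaled has one column per image) that centering and scaling behaved as expected:

max(abs(colMeans(scaled)))   # should be essentially 0
range(apply(scaled, 2, sd))  # should be essentially 1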

Calculate Covariance (Correlation)

Sigma_ = cor(scaled)  # 17 x 17 correlation matrix across images
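
Because the columns of scaled are already standardized, the covariance and correlation matrices should coincide; a quick check of that and of the matrix dimensions:

all.equal(cov(scaled), cor(scaled))  # TRUE: standardized data, so covariance equals correlation
dim(Sigma_)                          # 17 x 17, one row/column per image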

Get the Eigencomponents

myeigen=eigen(Sigma_)
cumsum(myeigen$values) / sum(myeigen$values)
##  [1] 0.6928202 0.7940449 0.8451072 0.8723847 0.8913841 0.9076337 0.9216282
##  [8] 0.9336889 0.9433871 0.9524454 0.9609037 0.9688907 0.9765235 0.9832209
## [15] 0.9894033 0.9953587 1.0000000
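
To see how quickly the variance accumulates, a small sketch plots the cumulative proportion against the number of eigencomponents, with a dashed reference line at the 80% target:

cum_var = cumsum(myeigen$values) / sum(myeigen$values)
plot(cum_var, type = "b", xlab = "Number of eigencomponents",
     ylab = "Cumulative proportion of variance")
abline(h = 0.80, lty = 2)  # 80% reference line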

Eigenshoes

As shown above, the cumulative proportion of variance captured by the first two eigencomponents (0.7940449) is the value closest to 80% variability, so we retain the first two components and build the corresponding eigenshoes.
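
This choice can also be checked numerically; the sketch below finds the component count whose cumulative proportion of variance is closest to 0.80:

which.min(abs(cumsum(myeigen$values) / sum(myeigen$values) - 0.80))  # 2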

scaling = diag(myeigen$values[1:2]^(-1/2)) / (sqrt(nrow(scaled) - 1))  # rescale the projections
eigenshoes = scaled %*% myeigen$vectors[,1:2] %*% scaling              # project onto the first two eigenvectors
imageShow(array(eigenshoes[,2], c(60, 125, 3)))                        # display the second eigenshoe (60 x 125 x 3)
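
The same approach displays the first eigenshoe; since eigenshoe values are not confined to [0, 1], this sketch (assuming the 60 x 125 x 3 dimensions from the resize step) rescales them before display:

e1 = eigenshoes[, 1]
e1 = (e1 - min(e1)) / (max(e1) - min(e1))  # rescale to [0, 1] before display
imageShow(array(e1, c(60, 125, 3)))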