install packages and read the data

> #install.packages("pls")
> library(pls)
> #install.packages("readxl")
> library(readxl)
> lula <- read_excel("lula.xls", sheet = "data")
> dim(lula)
[1]  47 105
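
The column indices dropped in the model calls below suggest that the first two columns are identifiers and that columns 3 to 9 hold the seven response variables r1fam, r2hel, r3eco, r4wor, r5lov, r6sch and r7oth, leaving 96 predictor columns. A minimal sanity check, assuming that layout:

names(lula)[1:9]       # two identifier columns followed by r1fam ... r7oth
summary(lula[, 3:9])   # ranges of the seven suicide-rate responses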

suicides for economic reasons only (response: r3eco)

> lula.pls <- plsr( r3eco ~ ., ncomp = 10, data = lula[,-c(1:4,6:9)], validation = "LOO")
> summary(lula.pls)
Data:   X dimension: 47 96 
    Y dimension: 47 1
Fit method: kernelpls
Number of components considered: 10

VALIDATION: RMSEP
Cross-validated using 47 leave-one-out segments.
       (Intercept)  1 comps  2 comps  3 comps  4 comps  5 comps  6 comps
CV           0.787   0.9343   0.8510   0.8849   0.8409   0.8400   0.8793
adjCV        0.787   0.9286   0.8515   0.8865   0.8383   0.8361   0.8743
       7 comps  8 comps  9 comps  10 comps
CV      0.9188   0.9995    1.021      1.06
adjCV   0.9127   0.9914    1.012      1.05

TRAINING: % variance explained
       1 comps  2 comps  3 comps  4 comps  5 comps  6 comps  7 comps  8 comps
X        14.65    42.32    60.30    70.56    76.56    78.85    81.91    83.39
r3eco    22.21    34.93    47.57    59.67    66.62    75.89    80.86    86.37
       9 comps  10 comps
X        84.71     85.72
r3eco    90.03     92.51
> plot(RMSEP(lula.pls), legendpos = "topright")

> plot(lula.pls, ncomp = 5, asp = 1, line = TRUE)

> plot(lula.pls, plottype = "scores", comps = 1:3)
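
To see which predictors define the components behind these score plots, the corresponding loadings can be plotted as well; a minimal sketch using loadingplot() from pls:

loadingplot(lula.pls, comps = 1:3, legendpos = "topright")   # loadings of components 1-3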

> explvar(lula.pls)
   Comp 1    Comp 2    Comp 3    Comp 4    Comp 5    Comp 6    Comp 7    Comp 8 
14.654019 27.669061 17.979698 10.257516  6.004438  2.282651  3.060802  1.486479 
   Comp 9   Comp 10 
 1.312603  1.008910 
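
The number of components can also be chosen programmatically from the cross-validation results instead of reading the RMSEP plot by eye; a minimal sketch (selectNcomp() is available in recent versions of pls):

cv <- drop(RMSEP(lula.pls, estimate = "CV")$val)   # (Intercept), 1 comps, ..., 10 comps
which.min(cv) - 1    # components at the minimum CV error; here the intercept-only
                     # model (CV = 0.787) is already lowest, so no component reduces it
selectNcomp(lula.pls, method = "onesigma", plot = TRUE)   # one-sigma heuristic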

suicides for all reasons combined (note: the "+" on the left-hand side of the formula sums the seven rates into a single response rather than fitting a canonical multi-response model; see the sketch after this block)

> lula.pls <- plsr( r1fam+r2hel+r3eco+r4wor+r5lov+r6sch+r7oth ~ ., ncomp = 10, data = lula[,-(1:2)], validation = "LOO")
> summary(lula.pls)
Data:   X dimension: 47 96 
    Y dimension: 47 1
Fit method: kernelpls
Number of components considered: 10

VALIDATION: RMSEP
Cross-validated using 47 leave-one-out segments.
       (Intercept)  1 comps  2 comps  3 comps  4 comps  5 comps  6 comps
CV           3.666     3.75    4.284    3.868    4.413    4.328    4.594
adjCV        3.666     3.75    4.266    3.819    4.349    4.312    4.569
       7 comps  8 comps  9 comps  10 comps
CV       4.903    5.280    5.612     5.796
adjCV    4.870    5.241    5.565     5.741

TRAINING: % variance explained
                                                       1 comps  2 comps
X                                                       37.280    46.86
r1fam + r2hel + r3eco + r4wor + r5lov + r6sch + r7oth    7.079    26.44
                                                       3 comps  4 comps
X                                                        51.37    59.48
r1fam + r2hel + r3eco + r4wor + r5lov + r6sch + r7oth    50.65    58.46
                                                       5 comps  6 comps
X                                                        75.58    78.47
r1fam + r2hel + r3eco + r4wor + r5lov + r6sch + r7oth    61.66    71.54
                                                       7 comps  8 comps
X                                                        81.59    83.58
r1fam + r2hel + r3eco + r4wor + r5lov + r6sch + r7oth    77.74    83.22
                                                       9 comps  10 comps
X                                                        84.91     85.79
r1fam + r2hel + r3eco + r4wor + r5lov + r6sch + r7oth    88.38     92.91
> plot(RMSEP(lula.pls), legendpos = "topright")

> plot(lula.pls, ncomp = 3, asp = 1, line = TRUE)

> plot(lula.pls, plottype = "scores", comps = 1:3)

> explvar(lula.pls)
    Comp 1     Comp 2     Comp 3     Comp 4     Comp 5     Comp 6     Comp 7 
37.2799887  9.5800403  4.5093864  8.1130185 16.0937437  2.8927401  3.1243833 
    Comp 8     Comp 9    Comp 10 
 1.9841437  1.3292289  0.8799715 
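
As noted in the heading, "+" on the left-hand side of the formula adds the seven rates into a single response, which is why the summary above reports a Y dimension of 47 x 1. A genuinely multi-response fit needs a matrix response; a minimal sketch, assuming as above that columns 3 to 9 hold the seven rates and the remaining 96 columns the predictors (the names Y, X and lula.mpls are only for illustration):

Y <- as.matrix(lula[, 3:9])     # the seven response rates
X <- as.matrix(lula[, -(1:9)])  # the 96 predictors
lula.mpls <- plsr(Y ~ X, ncomp = 10, validation = "LOO")
summary(lula.mpls)              # Y dimension should now read 47 7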

leave out Wakayama (row 30) and predict it from the remaining prefectures

> lula.pls <- plsr( r3eco ~ ., ncomp = 10, data = lula[-30,-c(1:4,6:9)], validation = "LOO")
> summary(lula.pls)
Data:   X dimension: 46 96 
    Y dimension: 46 1
Fit method: kernelpls
Number of components considered: 10

VALIDATION: RMSEP
Cross-validated using 46 leave-one-out segments.
       (Intercept)  1 comps  2 comps  3 comps  4 comps  5 comps  6 comps
CV          0.7759   0.9563   0.8463   0.8317   0.8668    0.849   0.8942
adjCV       0.7759   0.9458   0.8469   0.8265   0.8637    0.845   0.8886
       7 comps  8 comps  9 comps  10 comps
CV      0.9296   0.9851    1.025     1.064
adjCV   0.9235   0.9772    1.015     1.054

TRAINING: % variance explained
       1 comps  2 comps  3 comps  4 comps  5 comps  6 comps  7 comps  8 comps
X         8.17    39.56    52.76    67.90    76.71    78.90    82.10    83.72
r3eco    28.17    37.02    52.81    59.36    65.63    75.99    80.52    86.00
       9 comps  10 comps
X        84.84     85.83
r3eco    90.10     92.45
> plot(RMSEP(lula.pls), legendpos = "topright")

> plot(lula.pls, ncomp = 3, asp = 1, line = TRUE)

> plot(lula.pls, plottype = "scores", comps = 1:3)

> explvar(lula.pls)
    Comp 1     Comp 2     Comp 3     Comp 4     Comp 5     Comp 6     Comp 7 
 8.1703482 31.3905486 13.2014184 15.1389823  8.8094600  2.1843520  3.2080489 
    Comp 8     Comp 9    Comp 10 
 1.6195230  1.1186161  0.9854129 
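
To see which of the 96 predictors carry the most weight in the three-component model, the regression coefficients can be extracted and ranked; a minimal sketch (the predictors are not scaled in these fits, so coefficient size also reflects predictor scale):

b <- drop(coef(lula.pls, ncomp = 3))        # named coefficient vector for r3eco
head(sort(abs(b), decreasing = TRUE), 10)   # ten largest coefficients in absolute value
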
> predict(lula.pls, ncomp = 3, newdata = lula[30,-c(1:4,6:9)])
, , 3 comps

     r3eco
1 2.712462
> lula$r3eco[30]
[1] 3.821719
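
The held-out prediction can be compared with the observed value directly; a minimal sketch (pred30 is only an illustrative name):

pred30 <- drop(predict(lula.pls, ncomp = 3, newdata = lula[30, -c(1:4, 6:9)]))
lula$r3eco[30] - pred30    # observed minus predicted, about 1.11 here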