Code Setup.
Load required packages.

library(tidyverse)
library(keras)
library(tensorflow)

Question 1:

Weighted least squares. When the input vector x is weighted by a vector w, the least squares solution is the weighted mean:
\[ \mu_{\text{optimal}} = \frac{\sum_{i=1}^{n} w_{i} x_{i}}{\sum_{i=1}^{n} w_{i}} \]
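Assuming the objective being minimized is the weighted sum of squared errors \(\sum_{i=1}^{n} w_{i}(x_{i} - \mu)^{2}\), setting its derivative with respect to \(\mu\) to zero gives the weighted mean:

\[ -2 \sum_{i=1}^{n} w_{i}(x_{i} - \mu) = 0 \quad \Longrightarrow \quad \mu_{\text{optimal}} = \frac{\sum_{i=1}^{n} w_{i} x_{i}}{\sum_{i=1}^{n} w_{i}} \]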

# Question 1
x = c(1.57, 1.25, 2.80, 0.43)
w = c(2, 2, 1, 1)
optimal_mu = sum(x * w) / sum(w)
print(paste("Optimal value ", optimal_mu))
## [1] "Optimal value  1.47833333333333"

Question 2: Linear Regression

# Question 2 
# Load dataset
shhs =  read.delim("/Users/archanabalan/Documents/Spring Semester/DS/shhs.txt")
# Add a new column as: log(rdi4p + 1)
log_add = function(x){
  log(x + 1)
}
shhs$log_rdi4p = unlist(lapply(shhs$rdi4p, log_add))
# Fit a linear regression model
model_q2 = lm(log_rdi4p ~ waist + age_s1 + gender + bmi_s1, data = shhs, na.action = na.exclude)
model_coef = coef(model_q2)
print("The model coefficients are:" )
## [1] "The model coefficients are:"
print(model_coef)
##  (Intercept)        waist       age_s1       gender       bmi_s1 
## -2.281391771  0.007058175  0.019982473  0.517820827  0.063067594
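For reference, the same column can be created in a single vectorized step; base R's log1p() computes log(x + 1) directly (a minor alternative to the lapply() above, with no effect on the fitted model):

# Equivalent, vectorized construction of the transformed outcome
shhs$log_rdi4p = log1p(shhs$rdi4p)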

Question 3: Predicting using linear model

Inputs: waist = 100, age = 60, gender = 0 (male), bmi = 30

# Question 3
# waist = 100, age = 60, gender = 0 (male), bmi = 30
input_data = c(100, 60, 0, 30)
names(input_data) = c("waist", "age_s1", "gender", "bmi_s1")
out_log_rdi4p = predict(model_q2, newdata = as.data.frame(t(input_data)))
out_rdi4p = exp(out_log_rdi4p)-1
print(paste("The estimated value of log(rdi4p + 1) is ", out_log_rdi4p))
## [1] "The estimated value of log(rdi4p + 1) is  1.51540196188145"
print(paste("Thus, the  estimated value of rdi4p  is ", out_rdi4p))
## [1] "Thus, the  estimated value of rdi4p  is  3.55125018830316"

Question 4: Interpretation of model coefficients

Intercept: the predicted value of log(rdi4p + 1) when all input variables are zero.
Waist: holding all other input variables fixed, log(rdi4p + 1) increases by 0.007058175 for each one-unit increase in waist (see the quick check below).
Age: holding all other input variables fixed, log(rdi4p + 1) increases by 0.019982473 for each one-year increase in age.
Gender: holding all other input variables fixed, log(rdi4p + 1) is 0.517820827 higher when gender = 1 (female) than when gender = 0 (male).
BMI: holding all other input variables fixed, log(rdi4p + 1) increases by 0.063067594 for each one-unit increase in BMI.
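A quick numerical check of the waist interpretation (a sketch assuming model_q2 from Question 2; the hypothetical base_row uses the Question 3 inputs): increasing waist by one unit while holding the other inputs fixed changes the prediction by exactly the waist coefficient.

# Predict at waist = 100 and waist = 101, holding age, gender and BMI fixed
base_row = data.frame(waist = 100, age_s1 = 60, gender = 0, bmi_s1 = 30)
plus_one = transform(base_row, waist = waist + 1)
predict(model_q2, newdata = plus_one) - predict(model_q2, newdata = base_row)
# equals coef(model_q2)["waist"], i.e. about 0.007058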

Question 5: Logistic Regression

# Question 5  
# Add a new column to the dataframe defining the categories of rdi4p , based on the definitions provided
# Categorical Variables
# function to find the category of rdi4p
category_rdi4p = function(rdi4p){
  if (rdi4p < 7){ category = "Normal Range" }
  else if (rdi4p >= 7 & rdi4p < 15){ category = "Mild Sleep Apnea " }
  else if (rdi4p >= 15 & rdi4p < 30){ category = " Sleep Apnea " }
  else if (rdi4p >= 30){ category = " Severe Sleep Apnea " }
  category
}
# Computing the categories
shhs$rdi4p_categories = unlist(lapply(shhs$rdi4p, category_rdi4p))

# Logistic regression
model_q5 = glm(HTNDerv_s1~ rdi4p_categories, data = shhs, family = "binomial")
model_q5_coef = coef(model_q5)
summary(model_q5)
## 
## Call:
## glm(formula = HTNDerv_s1 ~ rdi4p_categories, family = "binomial", 
##     data = shhs)
## 
## Deviance Residuals: 
##     Min       1Q   Median       3Q      Max  
## -1.3328  -0.9805  -0.9805   1.2007   1.3880  
## 
## Coefficients:
##                                   Estimate Std. Error z value Pr(>|z|)    
## (Intercept)                         0.3581     0.1086   3.296 0.000979 ***
## rdi4p_categories Sleep Apnea       -0.3703     0.1338  -2.767 0.005665 ** 
## rdi4p_categoriesMild Sleep Apnea   -0.4126     0.1242  -3.321 0.000897 ***
## rdi4p_categoriesNormal Range       -0.8407     0.1138  -7.390 1.47e-13 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## (Dispersion parameter for binomial family taken to be 1)
## 
##     Null deviance: 7921.7  on 5803  degrees of freedom
## Residual deviance: 7824.8  on 5800  degrees of freedom
## AIC: 7832.8
## 
## Number of Fisher Scoring iterations: 4
print("The model coefficients are:" )
## [1] "The model coefficients are:"
print(model_q5_coef)
##                       (Intercept)     rdi4p_categories Sleep Apnea  
##                         0.3580629                        -0.3702954 
## rdi4p_categoriesMild Sleep Apnea       rdi4p_categoriesNormal Range 
##                        -0.4126219                        -0.8407338
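For reference, the same categories could be built in a single vectorized call with base R's cut() (a sketch; the labels below deliberately reproduce those returned by category_rdi4p, including their stray spaces, so the regression output is unchanged):

# Equivalent vectorized construction of the rdi4p categories
shhs$rdi4p_categories = as.character(cut(
  shhs$rdi4p,
  breaks = c(-Inf, 7, 15, 30, Inf),
  labels = c("Normal Range", "Mild Sleep Apnea ", " Sleep Apnea ", " Severe Sleep Apnea "),
  right  = FALSE   # left-closed intervals, matching the >= / < comparisons above
))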

Question 6: Logistic Regression with additional variables

# Question 6  

model_q6 = glm(HTNDerv_s1~ rdi4p_categories + waist + age_s1 + gender + bmi_s1+ smokstat_s1, data = shhs, family = "binomial")
model_q6_coef = coef(model_q6)
summary(model_q6)
## 
## Call:
## glm(formula = HTNDerv_s1 ~ rdi4p_categories + waist + age_s1 + 
##     gender + bmi_s1 + smokstat_s1, family = "binomial", data = shhs)
## 
## Deviance Residuals: 
##     Min       1Q   Median       3Q      Max  
## -1.9029  -1.0234  -0.6827   1.1240   2.1408  
## 
## Coefficients:
##                                    Estimate Std. Error z value Pr(>|z|)
## (Intercept)                       -5.122034   0.338140 -15.148  < 2e-16
## rdi4p_categories Sleep Apnea      -0.341663   0.143284  -2.385 0.017102
## rdi4p_categoriesMild Sleep Apnea  -0.303458   0.133779  -2.268 0.023308
## rdi4p_categoriesNormal Range      -0.452920   0.126855  -3.570 0.000356
## waist                              0.004466   0.003620   1.234 0.217298
## age_s1                             0.055657   0.002937  18.952  < 2e-16
## gender                             0.012453   0.065102   0.191 0.848310
## bmi_s1                             0.044391   0.009553   4.647 3.37e-06
## smokstat_s1                       -0.023995   0.031149  -0.770 0.441097
##                                      
## (Intercept)                       ***
## rdi4p_categories Sleep Apnea      *  
## rdi4p_categoriesMild Sleep Apnea  *  
## rdi4p_categoriesNormal Range      ***
## waist                                
## age_s1                            ***
## gender                               
## bmi_s1                            ***
## smokstat_s1                          
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## (Dispersion parameter for binomial family taken to be 1)
## 
##     Null deviance: 7380.2  on 5383  degrees of freedom
## Residual deviance: 6837.8  on 5375  degrees of freedom
##   (420 observations deleted due to missingness)
## AIC: 6855.8
## 
## Number of Fisher Scoring iterations: 4
print("The model coefficients are:" )
## [1] "The model coefficients are:"
print(model_q6_coef)
##                       (Intercept)     rdi4p_categories Sleep Apnea  
##                      -5.122033742                      -0.341662580 
## rdi4p_categoriesMild Sleep Apnea       rdi4p_categoriesNormal Range 
##                      -0.303457620                      -0.452920307 
##                             waist                            age_s1 
##                       0.004465803                       0.055657331 
##                            gender                            bmi_s1 
##                       0.012452504                       0.044390888 
##                       smokstat_s1 
##                      -0.023995221

Results interpretation: R encodes rdi4p_categories with dummy (indicator) variables and, because the character levels sort alphabetically, Severe Sleep Apnea becomes the reference (baseline) category. The three reported coefficients (Sleep Apnea, Mild Sleep Apnea, Normal Range) therefore measure differences in the log-odds of hypertension relative to the Severe Sleep Apnea group; it does not mean that severe sleep apnea has no impact on hypertension.

Intercept: when all continuous inputs are zero and rdi4p falls in the reference category (Severe Sleep Apnea), the log-odds of hypertension is -5.122033742.

Holding all other inputs constant, being in the Sleep Apnea category rather than the Severe Sleep Apnea reference lowers the log-odds of hypertension by 0.341662580.
Holding all other inputs constant, being in the Mild Sleep Apnea category rather than the reference lowers the log-odds by 0.303457620.
Holding all other inputs constant, being in the Normal Range category rather than the reference lowers the log-odds by 0.452920307.
Holding all other inputs constant, a one-unit increase in waist raises the log-odds by 0.004465803, and a one-year increase in age raises the log-odds by 0.055657331.
Holding all other inputs constant, gender = 1 rather than gender = 0 raises the log-odds by 0.012452504, and a one-unit increase in BMI raises the log-odds by 0.044390888.
Holding all other inputs constant, a one-unit increase in smokstat_s1 lowers the log-odds by 0.023995221. (These log-odds effects are converted to odds ratios in the sketch below.)
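Because these coefficients are on the log-odds scale, exponentiating them gives odds ratios, which are often easier to read (a short sketch using model_q6 from above):

# Odds ratios: multiplicative change in the odds of hypertension per unit change in each input
exp(coef(model_q6))
# e.g. exp(0.0557) is about 1.06, so each additional year of age increases the odds by roughly 6%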

Question 7: Repeating linear regression model with neural network

Estimation of log(rdi4p + 1) using the predictors waist, age, gender, and BMI (as in Question 2).

# Question 7 
set.seed(123)
# Choose the variables of interest
data_nn = shhs %>% select(log_rdi4p, waist, age_s1, gender, bmi_s1)
data_nn = na.omit(data_nn)
# Define output and input variables
data_nn_y = data_nn$log_rdi4p
data_nn_x = data_nn %>% select(-log_rdi4p) %>% as.matrix()
# Split training (70% ) and testing (30%)
train_index = sample(c(TRUE, FALSE), length(data_nn_y), replace = TRUE, prob = c(.7, .3))
train_y = data_nn_y[train_index]
test_y = data_nn_y[!train_index]

train_x = data_nn_x[train_index, ] 
test_x = data_nn_x[!train_index, ] 
# Scale and centre the input data
mean    <- colMeans(train_x)
std     <- apply(train_x, 2, sd)
train_x <- scale(train_x, center = mean, scale = std)
test_x <- scale(test_x, center = mean, scale = std)

# Set up the model
model <- keras_model_sequential() %>%
  # network architecture
  layer_dense(units = 16,activation = "relu",use_bias = TRUE,  input_shape = ncol(train_x)) %>%
  layer_dense(units = 4,activation = "relu") %>%
  layer_dense(units = 2,activation = "relu") %>%
  layer_dense(units = 1)
# Print model summary
summary(model)
## ___________________________________________________________________________
## Layer (type)                     Output Shape                  Param #     
## ===========================================================================
## dense (Dense)                    (None, 16)                    80          
## ___________________________________________________________________________
## dense_1 (Dense)                  (None, 4)                     68          
## ___________________________________________________________________________
## dense_2 (Dense)                  (None, 2)                     10          
## ___________________________________________________________________________
## dense_3 (Dense)                  (None, 1)                     3           
## ===========================================================================
## Total params: 161
## Trainable params: 161
## Non-trainable params: 0
## ___________________________________________________________________________
# backpropagation
model %>% compile(
    optimizer = optimizer_rmsprop(),
    loss = "mse",
    metrics =  list("mean_absolute_error")
    )

# Training the model
learn <- model %>% fit(
  x = train_x,
  y = train_y,
  epochs = 40,
  batch_size = 32,
  validation_split = .2,
  verbose = 1

)
# Print training output 
print("Trained Model: ")
## [1] "Trained Model: "
learn
## Trained on 3,043 samples, validated on 761 samples (batch_size=32, epochs=40)
## Final epoch (plot to see history):
##                val_loss: 0.7403
## val_mean_absolute_error: 0.6979
##                    loss: 0.8391
##     mean_absolute_error: 0.7453
## compare with the test data
pred_nn = model %>% predict(test_x)

## compare with ordinary regression
fit = lm(train_y ~ train_x)
pred_lm =  cbind(1, test_x) %*% coef(fit)
cor_nn = cor(pred_nn, test_y)
cor_lm = cor(pred_lm, test_y)
# 
print(paste("Correlation with the test set for neural network, ",cor_nn))
## [1] "Correlation with the test set for neural network,  0.518543024505331"
print(paste("Correlation with the test set for linear regression, ",cor_lm))
## [1] "Correlation with the test set for linear regression,  0.514075641770967"

**Note:** The reported result is the best-performing neural network after manually tuning hyperparameters such as the number of epochs and the layer configuration; a sketch of how such a search could be automated follows.
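A minimal sketch of such a search (a hypothetical grid over layer width and number of epochs, assuming the train_x/train_y objects created earlier; not the exact procedure used above):

# Hypothetical grid search over layer width and number of epochs
results <- expand.grid(units = c(8, 16, 32), epochs = c(20, 40))
results$val_mae <- NA
for (i in seq_len(nrow(results))) {
  m <- keras_model_sequential() %>%
    layer_dense(units = results$units[i], activation = "relu",
                input_shape = ncol(train_x)) %>%
    layer_dense(units = 1)
  m %>% compile(optimizer = optimizer_rmsprop(), loss = "mse",
                metrics = list("mean_absolute_error"))
  h <- m %>% fit(train_x, train_y, epochs = results$epochs[i],
                 batch_size = 32, validation_split = 0.2, verbose = 0)
  # keep the final validation MAE of each configuration for comparison
  results$val_mae[i] <- tail(h$metrics$val_mean_absolute_error, 1)
}
results[order(results$val_mae), ]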

Question 8: Repeating logistic regression model with neural network

Estimation of HTNDerv_s1 using the predictors rdi4p, waist, age, gender, and BMI (cf. Question 6).

# Question 8
set.seed(123)
# Choose the variables of interest
data_nn = shhs %>% select(rdi4p, waist, HTNDerv_s1, gender, age_s1, bmi_s1)
data_nn = na.omit(data_nn)
# Convert output to a binary output variable
data_nn_y = data_nn$HTNDerv_s1
data_nn_y_binary  = to_categorical(data_nn_y,2)
 # Define  input variables
data_nn_x = data_nn %>% select(-HTNDerv_s1) %>% as.matrix()
# Split training (70% ) and testing (30%)
train_index = sample(c(TRUE, FALSE), dim(data_nn_x )[1], replace = TRUE, prob = c(.7, .3))

train_y = data_nn_y_binary[train_index, ]
train_x = data_nn_x[train_index, ]

test_y = data_nn_y_binary[!train_index, ]
test_x = data_nn_x[!train_index, ]
# Scale and centre the input data using the training-set statistics
mean    <- colMeans(train_x)
std     <- apply(train_x, 2, sd)
train_x <- scale(train_x, center = mean, scale = std)
test_x  <- scale(test_x, center = mean, scale = std)

# Set up the model
model = keras_model_sequential() %>%
  layer_dense(units = 2^8, activation = "relu", input_shape = ncol(train_x)) %>% 
  layer_dropout(rate = 0.4) %>%
  layer_dense(units = 2^4, activation = "relu") %>%
  layer_dropout(rate = 0.3) %>%
  layer_dense(units = 2, activation = "softmax")
summary(model)
## ___________________________________________________________________________
## Layer (type)                     Output Shape                  Param #     
## ===========================================================================
## dense_4 (Dense)                  (None, 256)                   1536        
## ___________________________________________________________________________
## dropout (Dropout)                (None, 256)                   0           
## ___________________________________________________________________________
## dense_5 (Dense)                  (None, 16)                    4112        
## ___________________________________________________________________________
## dropout_1 (Dropout)              (None, 16)                    0           
## ___________________________________________________________________________
## dense_6 (Dense)                  (None, 2)                     34          
## ===========================================================================
## Total params: 5,682
## Trainable params: 5,682
## Non-trainable params: 0
## ___________________________________________________________________________
# backpropagation
model %>% compile(
  optimizer = optimizer_rmsprop(),
  loss = "categorical_crossentropy",
  metrics =  list("accuracy")
)
# Training the model
learn <- model %>% fit(
  x = train_x,
  y = train_y,
  epochs = 40,
  batch_size = 32,
  validation_split = .2,
  verbose = 1
)

# Print the trained model
learn
## Trained on 3,043 samples, validated on 761 samples (batch_size=32, epochs=40)
## Final epoch (plot to see history):
## val_loss: 0.6367
##  val_acc: 0.6216
##     loss: 0.6281
##      acc: 0.6477
## compare with the test data
print(" Model performance on test data")
## [1] " Model performance on test data"
acc_nn=model %>% evaluate(test_x, test_y)

## compare with ordinary logistic regression
fit = glm(HTNDerv_s1 ~ ., family = "binomial", data = data_nn, subset = train_index)
pred_lg =  (predict(fit, data_nn[!train_index,], type = "response") > 0.5) * 1
 
# compare predictions with the observed labels; column 2 of the one-hot matrix is the indicator for HTNDerv_s1 == 1
ptab2 = table(pred_lg, test_y[, 2])
acc_lg = sum(diag(ptab2)) / sum(ptab2)

print(paste("Correlation with the test set for neural network, ",acc_nn$acc))
## [1] "Correlation with the test set for neural network,  0.609801488981649"
print(paste("Correlation with the test set for logistic regression, ",acc_lg))
## [1] "Correlation with the test set for logistic regression,  0.387096774193548"

**Note:** The reported result is the best-performing neural network after manually tuning hyperparameters such as the number of epochs and the layer configuration.

Question 9: MNIST example

# Question 9
library(keras)
mnist <- dataset_mnist()
x_train <- mnist$train$x
y_train <- mnist$train$y
x_test <- mnist$test$x
y_test <- mnist$test$y

# reshape
x_train <- array_reshape(x_train, c(nrow(x_train), 784))
x_test <- array_reshape(x_test, c(nrow(x_test), 784))
# rescale
x_train <- x_train / 255
x_test <- x_test / 255

y_train <- to_categorical(y_train, 10)
y_test <- to_categorical(y_test, 10)

model <- keras_model_sequential() 
model %>% 
  layer_dense(units = 256, activation = 'relu', input_shape = c(784)) %>% 
  layer_dropout(rate = 0.4) %>% 
  layer_dense(units = 128, activation = 'relu') %>%
  layer_dropout(rate = 0.3) %>%
  layer_dense(units = 10, activation = 'softmax')
summary(model)
## ___________________________________________________________________________
## Layer (type)                     Output Shape                  Param #     
## ===========================================================================
## dense_7 (Dense)                  (None, 256)                   200960      
## ___________________________________________________________________________
## dropout_2 (Dropout)              (None, 256)                   0           
## ___________________________________________________________________________
## dense_8 (Dense)                  (None, 128)                   32896       
## ___________________________________________________________________________
## dropout_3 (Dropout)              (None, 128)                   0           
## ___________________________________________________________________________
## dense_9 (Dense)                  (None, 10)                    1290        
## ===========================================================================
## Total params: 235,146
## Trainable params: 235,146
## Non-trainable params: 0
## ___________________________________________________________________________
model %>% compile(
  loss = 'categorical_crossentropy',
  optimizer = optimizer_rmsprop(),
  metrics = c('accuracy')
)

history <- model %>% fit(
  x_train, y_train, 
  epochs = 30, batch_size = 128, 
  validation_split = 0.2
)

plot(history)

# Evaluate the model’s performance on the test data
model %>% evaluate(x_test, y_test)
## $loss
## [1] 0.09874859
## 
## $acc
## [1] 0.9817
#Generate predictions on new data
model %>% predict_classes(x_test)
##     [1] 7 2 1 0 4 1 4 9 5 9 0 6 9 0 1 5 9 7 3 4 9 6 6 5 4 0 7 4 0 1 3 1 3 4
##  ... (predicted class labels for the remaining test images truncated)
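Rather than printing every predicted label, a more compact summary is a confusion matrix of predictions against the true test labels (a sketch using the objects created above; mnist$test$y still holds the original integer labels):

# Cross-tabulate predicted vs. true digit labels and recompute overall accuracy
pred_digits = model %>% predict_classes(x_test)
table(predicted = pred_digits, truth = mnist$test$y)
mean(pred_digits == mnist$test$y)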