# Load required libraries
library(ISLR2) # For the Wage dataset
library(boot) # For cross-validation
#library(bootStepAIC) # For stepAIC function
# Load the Wage dataset
data(Wage)
# Polynomial regression with cross-validation to select optimal degree
set.seed(123) # For reproducibility
cv.error <- rep(NA, 10) # Vector to store cross-validation errors
for (d in 1:10) {
fit <- glm(wage ~ poly(age, d), data = Wage)
cv.error[d] <- cv.glm(Wage, fit, K = 10)$delta[1]
}
# Optimal degree selected using cross-validation
optimal_degree <- which.min(cv.error)
cat("Optimal Degree selected using cross-validation:", optimal_degree, "\n")
## Optimal Degree selected using cross-validation: 10
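It can also help to plot the cross-validation errors themselves, since the curve is often quite flat beyond the first few degrees; a quick sketch using the cv.error vector computed above (output not shown):
# Plot the 10-fold CV error against polynomial degree and mark the minimum
plot(1:10, cv.error, type = "b", xlab = "Degree", ylab = "CV Error", main = "CV Error vs. Polynomial Degree")
points(optimal_degree, cv.error[optimal_degree], col = "red", pch = 19)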
# ANOVA-based check: fit a degree-10 polynomial and inspect its ANOVA table.
# Note that anova() on a single fit reports only one row for the whole
# poly(age, 10) term, so this is not a sequential degree-by-degree test;
# a nested-model comparison is sketched after the output below.
fit_ano <- lm(wage ~ poly(age, 10), data = Wage)
summary_aov <- anova(fit_ano, test = "F")
# Index of the largest p-value among the listed terms
degree_ANOVA <- which.max(summary_aov$'Pr(>F)'[1:10])
cat("Degree selected using ANOVA:", degree_ANOVA, "\n")
## Degree selected using ANOVA: 1
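The more conventional way to use hypothesis testing for degree selection is to fit a sequence of nested polynomial models and compare them sequentially with anova(); the sketch below (output not shown, and nested_fits is just a placeholder name) follows that approach, and the degree it suggests may differ from the index reported above.
# Sketch: sequential F tests on nested polynomial fits (degrees 1 to 5)
nested_fits <- lapply(1:5, function(d) lm(wage ~ poly(age, d), data = Wage))
do.call(anova, nested_fits)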
# Plot the polynomial fit corresponding to the CV-selected degree
fit_cv <- glm(wage ~ poly(age, optimal_degree), data = Wage)
plot(Wage$age, Wage$wage, col = "blue", xlab = "Age", ylab = "Wage",
     main = paste("Polynomial Regression Fit (Degree", optimal_degree, ")"))
points(Wage$age, fitted(fit_cv), col = "red", pch = 20)
legend("topright", legend = c("Actual", "Polynomial Fit"), col = c("blue", "red"), pch = c(1, 20))
# Plot the polynomial fit corresponding to the ANOVA-selected degree
fit_anova_sel <- lm(wage ~ poly(age, degree_ANOVA), data = Wage)
plot(Wage$age, Wage$wage, col = "blue", xlab = "Age", ylab = "Wage",
     main = paste("Polynomial Regression Fit (Degree", degree_ANOVA, ")"))
lines(sort(Wage$age), predict(fit_anova_sel, newdata = data.frame(age = sort(Wage$age))), col = "red")
legend("topright", legend = c("Actual", "Polynomial Fit"), col = c("blue", "red"), lty = c(NA, 1), pch = c(1, NA))
# Load necessary libraries
library(boot) # For cross-validation
# Load the Wage dataset
data(Wage)
# Define the number of folds for cross-validation
k <- 10
# Define a function to compute the k-fold cross-validation error for a step
# function of age with a given number of cuts
step_cv_error <- function(data, cuts, k) {
# Create age groups based on the specified number of cuts
data$age_group <- cut(data$age, cuts)
# Fit the step function and estimate its test error with k-fold CV
fit <- glm(wage ~ age_group, data = data)
cv.glm(data, fit, K = k)$delta[1]
}
# Perform cross-validation to choose the optimal number of cuts (2 to max_cuts)
cv_step <- function(data, max_cuts, k) {
sapply(2:max_cuts, function(cuts) step_cv_error(data, cuts, k))
}
# Perform cross-validation
set.seed(123) # For reproducible fold assignment
cv_errors <- cv_step(Wage, max_cuts = 10, k = k)
# Find the optimal number of cuts with minimum CV error
optimal_cuts <- which.min(cv_errors) + 1 # Adding 1 because indexing starts from 1
# Plot the cross-validation error
plot(2:10, cv_errors, type = 'b', xlab = 'Number of Cuts', ylab = 'Cross-Validation Error',
main = 'Cross-Validation Error vs. Number of Cuts')
points(optimal_cuts, cv_errors[optimal_cuts - 1], col = 'red', pch = 19)
legend("topright", legend = paste("Optimal Cuts =", optimal_cuts), col = 'red', pch = 19)
# Fit the final model with the optimal number of cuts
Wage$age_group <- cut(Wage$age, optimal_cuts)
lm_final <- lm(wage ~ age_group, data = Wage)
# Plot the fit obtained
plot(Wage$age, Wage$wage, xlab = 'Age', ylab = 'Wage', main = 'Step Function Fit', col = 'blue')
lines(Wage$age, predict(lm_final), col = 'red', lwd = 2)
# Load necessary libraries
library(mgcv)
## Loading required package: nlme
## This is mgcv 1.8-42. For overview type 'help("mgcv-package")'.
library(ggplot2)
# Fit a GAM to explore the relationship between wage and age, marital status, and job class
gam_model <- gam(wage ~ s(age) + maritl + jobclass, data = Wage)
# Summary of the GAM
summary(gam_model)
##
## Family: gaussian
## Link function: identity
##
## Formula:
## wage ~ s(age) + maritl + jobclass
##
## Parametric coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 94.512 1.898 49.803 < 2e-16 ***
## maritl2. Married 14.501 2.061 7.036 2.44e-12 ***
## maritl3. Widowed -1.887 9.113 -0.207 0.836
## maritl4. Divorced -2.048 3.345 -0.612 0.540
## maritl5. Separated -3.324 5.533 -0.601 0.548
## jobclass2. Information 15.204 1.423 10.686 < 2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Approximate significance of smooth terms:
## edf Ref.df F p-value
## s(age) 5.027 6.116 19.26 <2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## R-sq.(adj) = 0.141 Deviance explained = 14.4%
## GCV = 1501.8 Scale est. = 1496.3 n = 3000
# Plot the results
plot(gam_model, pages = 1)
# Predicted vs. observed plot
plot(Wage$wage, predict(gam_model), xlab = "Observed Wage", ylab = "Predicted Wage",
main = "Predicted vs. Observed Wage")
# Explore the relationship between wage and marital status using boxplots
ggplot(Wage, aes(x = maritl, y = wage)) +
geom_boxplot() +
labs(x = "Marital Status", y = "Wage", title = "Relationship between Marital Status and Wage")
# Explore the relationship between wage and job class using boxplots
ggplot(Wage, aes(x = jobclass, y = wage)) +
geom_boxplot() +
labs(x = "Job Class", y = "Wage", title = "Relationship between Job Class and Wage")
Parametric Coefficients: The intercept represents the estimated average wage for individuals in the reference category of the categorical variables maritl (Never Married) and jobclass (Industrial).
Coefficients for other categories of maritl and jobclass represent the difference in average wage compared to the reference category. For example, individuals who are Married (maritl2. Married) have an estimated average wage that is 14.501 units higher than individuals who are Never Married, holding other variables constant.
Some categories, such as Widowed, Divorced, and Separated, do not show significant differences in average wage compared to the reference category (Never Married), as indicated by their high p-values.
Smooth Terms (Age): The smooth term for age (s(age)) suggests a non-linear relationship between age and wage. The estimated degrees of freedom (edf) indicate that the relationship is modeled using approximately 5.027 degrees of freedom, implying a moderately complex relationship.
The F-test and associated p-value assess the overall significance of the smooth term for age. In this case, the p-value is highly significant (p < 2e-16), indicating that age is a significant predictor of wage after accounting for other variables in the model.
Adjusted R-squared and Deviance Explained: The adjusted R-squared value (R-sq.(adj)) measures the proportion of variance in the response variable (wage) explained by the model, adjusted for the number of predictors. In this case, the adjusted R-squared is 0.141, indicating that approximately 14.1% of the variability in wage is explained by the model.
The Deviance explained represents the percentage reduction in deviance achieved by the model compared to a null model with no predictors. In this case, the model explains 14.4% of the deviance, indicating a modest but significant improvement over the null model.
Generalized Cross Validation (GCV) and Scale Estimation: GCV provides an estimate of the predictive performance of the model. A lower GCV value suggests better model fit, although the absolute value itself may not be interpretable. Here, the GCV is 1501.8.
Scale estimation (Scale est.) provides an estimate of the error variance in the model. In this case, the scale estimate is 1496.3.
Overall, the GAM suggests that age, job class, and being married (relative to never married) are significant predictors of wage, with a non-linear relationship observed for age; the remaining marital-status categories show little evidence of an effect in this model. One way to check the non-linearity claim directly is sketched below.
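A minimal sketch of that check (output not shown, and gam_linear is just a placeholder name): compare the fitted GAM against an otherwise identical model with a linear age term, using an approximate F test.
# Sketch: does the smooth term for age improve on a simple linear term?
gam_linear <- gam(wage ~ age + maritl + jobclass, data = Wage)
anova(gam_linear, gam_model, test = "F")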
# Load the Boston dataset
data(Boston)
# Fit cubic polynomial regression
model <- lm(nox ~ poly(dis, 3), data = Boston)
# Display regression output
summary(model)
##
## Call:
## lm(formula = nox ~ poly(dis, 3), data = Boston)
##
## Residuals:
## Min 1Q Median 3Q Max
## -0.121130 -0.040619 -0.009738 0.023385 0.194904
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 0.554695 0.002759 201.021 < 2e-16 ***
## poly(dis, 3)1 -2.003096 0.062071 -32.271 < 2e-16 ***
## poly(dis, 3)2 0.856330 0.062071 13.796 < 2e-16 ***
## poly(dis, 3)3 -0.318049 0.062071 -5.124 4.27e-07 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 0.06207 on 502 degrees of freedom
## Multiple R-squared: 0.7148, Adjusted R-squared: 0.7131
## F-statistic: 419.3 on 3 and 502 DF, p-value: < 2.2e-16
# Plot the data and polynomial fits
plot(Boston$dis, Boston$nox, xlab = "dis", ylab = "nox", main = "Cubic Polynomial Regression")
lines(sort(Boston$dis), predict(model, data.frame(dis = sort(Boston$dis))), col = "red", lwd = 2)
# Define a function to fit polynomial regression and calculate RSS
fit_polynomial <- function(degree) {
model <- lm(nox ~ poly(dis, degree), data = Boston)
rss <- sum(model$residuals^2)
return(list(model = model, rss = rss))
}
# Initialize a numeric vector for the RSS values and a list for the fitted models
rss_values <- numeric(10)
models <- list()
# Fit polynomial regressions for degrees 1 to 10
for (degree in 1:10) {
fit <- fit_polynomial(degree)
rss_values[degree] <- fit$rss
models[[degree]] <- fit$model
}
# Plot the polynomial fits and report RSS
par(mfrow = c(2, 5), mar = c(4, 4, 2, 2))
for (degree in 1:10) {
plot(Boston$dis, Boston$nox, xlab = "dis", ylab = "nox", main = paste("Degree", degree))
lines(sort(Boston$dis), predict(models[[degree]], data.frame(dis = sort(Boston$dis))), col = "red", lwd = 2)
text(4, max(Boston$nox), paste("RSS:", round(rss_values[degree], digits = 2)), pos = 4, col = "blue")
}
library(boot)
# Function to calculate mean squared error (MSE)
calculate_mse <- function(actual, predicted) {
return(mean((actual - predicted)^2))
}
# Define k-fold cross-validation function
k_fold_cv <- function(data, k, max_degree) {
# Randomly assign every observation to one of k folds
# (contiguous blocks would be a poor choice here, since the rows of Boston
# are ordered, and they would also drop the last few observations)
folds <- sample(rep(1:k, length.out = nrow(data)))
# Initialize vector to store mean squared errors
mse_values <- numeric(max_degree)
# Perform k-fold cross-validation for each degree
for (degree in 1:max_degree) {
mse <- numeric(k)
for (i in 1:k) {
# Split data into training and validation sets
validation_set <- data[folds == i, ]
training_set <- data[folds != i, ]
# Fit polynomial regression model on training set
model <- lm(nox ~ poly(dis, degree), data = training_set)
# Predict on validation set
predicted <- predict(model, newdata = validation_set)
# Calculate mean squared error
mse[i] <- calculate_mse(validation_set$nox, predicted)
}
# Calculate average MSE across folds for this degree
mse_values[degree] <- mean(mse)
}
return(mse_values)
}
# Set number of folds for cross-validation
k <- 10
# Perform k-fold cross-validation for degrees 1 to 10
set.seed(123) # For reproducible fold assignment
cv_errors <- k_fold_cv(Boston, k, max_degree = 10)
# Find the optimal degree that minimizes cross-validation error
optimal_degree <- which.min(cv_errors)
# Plot cross-validation errors
plot(1:10, cv_errors, type = "b", xlab = "Degree", ylab = "Cross-validation Error", main = "Cross-validation Error vs. Degree")
# Highlight the optimal degree
points(optimal_degree, cv_errors[optimal_degree], col = "red", pch = 19)
text(optimal_degree, cv_errors[optimal_degree], paste("Degree:", optimal_degree), pos = 4, col = "red")
Quadratic Relationship: The quadratic model suggests that as the distance to employment centers (dis) increases, the nitrogen oxides concentration (nox) initially decreases, reaches a minimum point, and then starts increasing again. This indicates a non-linear relationship between the distance to employment centers and nitrogen oxides concentration.
Optimal Complexity: The selection of degree 2 implies that adding further complexity to the model (e.g., using higher degrees) does not significantly improve predictive performance. A quadratic model strikes a balance between simplicity and flexibility, capturing the curvature in the relationship without overfitting to noise in the data.
Interpretability: A quadratic polynomial is relatively easy to interpret compared to higher-order polynomials. It allows for clear visualization and understanding of how changes in distance to employment centers relate to changes in nitrogen oxides concentration.
Practical Implications: The finding that the nitrogen oxides concentration initially decreases with distance to employment centers but then increases suggests potential policy implications. For example, it could indicate that there might be a distance threshold beyond which efforts to reduce emissions from employment centers could have diminishing returns or even unintended consequences.
Overall, the selection of a quadratic polynomial suggests a nuanced relationship between distance to employment centers and nitrogen oxides concentration, providing insights that can inform decision-making in urban planning, environmental management, and public health.
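If the quadratic description is taken at face value, the turning point it implies can be located from a raw-coefficient fit; the sketch below is illustrative only (quad_raw is a placeholder name, output not shown), and whether the minimum actually falls inside the observed range of dis should be checked against range(Boston$dis).
# Sketch: locate the vertex of a raw (non-orthogonal) quadratic fit of nox on dis
quad_raw <- lm(nox ~ poly(dis, 2, raw = TRUE), data = Boston)
b <- coef(quad_raw)
vertex <- -b[2] / (2 * b[3]) # dis value at which the fitted parabola turns
vertex
range(Boston$dis)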
library(splines)
# Fit regression spline with four degrees of freedom
model_spline <- lm(nox ~ bs(dis, df = 4), data = Boston)
# Display regression output
summary(model_spline)
##
## Call:
## lm(formula = nox ~ bs(dis, df = 4), data = Boston)
##
## Residuals:
## Min 1Q Median 3Q Max
## -0.124622 -0.039259 -0.008514 0.020850 0.193891
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 0.73447 0.01460 50.306 < 2e-16 ***
## bs(dis, df = 4)1 -0.05810 0.02186 -2.658 0.00812 **
## bs(dis, df = 4)2 -0.46356 0.02366 -19.596 < 2e-16 ***
## bs(dis, df = 4)3 -0.19979 0.04311 -4.634 4.58e-06 ***
## bs(dis, df = 4)4 -0.38881 0.04551 -8.544 < 2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 0.06195 on 501 degrees of freedom
## Multiple R-squared: 0.7164, Adjusted R-squared: 0.7142
## F-statistic: 316.5 on 4 and 501 DF, p-value: < 2.2e-16
# Plot the resulting fit
plot(Boston$dis, Boston$nox, xlab = "dis", ylab = "nox", main = "Regression Spline Fit")
lines(sort(Boston$dis), predict(model_spline, data.frame(dis = sort(Boston$dis))), col = "red", lwd = 2)
When only df is supplied, bs() chooses the knots automatically at quantiles of the predictor: with df = 4 and the default cubic basis there is df - degree = 1 interior knot, placed at the median of dis. Placing knots where the data are dense helps the spline capture the non-linear relationship between dis and nox without using excessive flexibility; the chosen knot locations can be inspected directly, as sketched below.
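A minimal sketch of that check (output not shown):
# Inspect the interior and boundary knots used by the df = 4 basis
attr(bs(Boston$dis, df = 4), "knots")
attr(bs(Boston$dis, df = 4), "Boundary.knots")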
library(splines)
# Initialize a numeric vector for the RSS values and a list for the fitted models
rss_values <- numeric(10)
models <- list()
# Fit regression spline for degrees of freedom from 3 to 12
for (df in 3:12) {
model <- lm(nox ~ bs(dis, df = df), data = Boston)
rss <- sum(residuals(model)^2)
rss_values[df-2] <- rss
models[[df-2]] <- model
}
# Plot the resulting fits and report RSS
par(mfrow = c(2, 5), mar = c(4, 4, 2, 2))
for (df in 3:12) {
plot(Boston$dis, Boston$nox, xlab = "dis", ylab = "nox", main = paste("DF =", df))
lines(sort(Boston$dis), predict(models[[df-2]], data.frame(dis = sort(Boston$dis))), col = "red", lwd = 2)
text(4, max(Boston$nox), paste("RSS:", round(rss_values[df-2], digits = 2)), pos = 4, col = "blue")
}
Decreasing RSS: As the degrees of freedom increase (i.e., the flexibility of the spline increases), the models tend to have lower RSS values. This indicates that higher degrees of freedom allow the spline to better capture the non-linear relationship between the predictor variable dis and the response variable nox, resulting in a better fit to the data.
Overfitting: At very high degrees of freedom, the spline may start to chase noise rather than the underlying relationship between dis and nox. Training RSS alone will not reveal this, since it generally keeps decreasing as flexibility grows; overfitting shows up instead as overly wiggly fits in the plots, especially where dis is sparse, or as an increase in cross-validated error (examined in the next step).
Balancing Complexity and Fit: There is typically a trade-off between model complexity (determined by the degrees of freedom) and goodness of fit (indicated by the RSS). The goal is to select a model that adequately captures the underlying non-linear relationship while avoiding excessive complexity. This balance can often be achieved by choosing a moderate number of degrees of freedom that provides a good fit without overfitting.
Visual Inspection of Fits: By examining the plots of the spline fits for different degrees of freedom, it’s possible to visually assess how well the spline captures the curvature and variability in the data. Models with appropriate degrees of freedom should closely follow the data pattern without excessive oscillations or deviations.
Overall, the results obtained from fitting regression splines with varying degrees of freedom help in understanding the relationship between dis and nox and in selecting an appropriate level of flexibility for the spline model that strikes a balance between capturing the underlying non-linear relationship and avoiding overfitting.
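Plotting the stored RSS values against the degrees of freedom makes this trade-off easier to see; a minimal sketch reusing the rss_values vector filled in the loop above:
# Plot training RSS against spline degrees of freedom
plot(3:12, rss_values, type = "b", xlab = "Degrees of Freedom", ylab = "RSS",
     main = "Training RSS vs. Spline Degrees of Freedom")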
library(splines)
# Remove missing values from the data
Boston_clean <- na.omit(Boston)
# Initialize vectors to store cross-validated errors
cv_errors <- numeric(10)
# Perform 10-fold cross-validation for degrees of freedom from 3 to 12
set.seed(123) # For reproducible fold assignment
for (df in 3:12) {
cv <- cv.glm(Boston_clean, glm(nox ~ bs(dis, df = df), data = Boston_clean), K = 10)
cv_errors[df - 2] <- cv$delta[1] # Raw 10-fold CV estimate of the test MSE
}
# Find the optimal degrees of freedom with the lowest cross-validated error
optimal_df <- which.min(cv_errors) + 2 # Adding 2 because index 1 corresponds to df = 3
# Plot cross-validated errors
plot(3:12, cv_errors, type = "b", xlab = "Degrees of Freedom", ylab = "Cross-validated Error",
main = "Cross-validated Error vs. Degrees of Freedom")
points(optimal_df, cv_errors[optimal_df - 2], col = "red", pch = 19)
Based on the cross-validation results, the optimal degrees of freedom for the regression spline model on the Boston dataset were determined to be 5. This indicates that a spline with 5 degrees of freedom provides the best balance between model complexity and predictive performance.
Here’s a description of the results:
Optimal Degrees of Freedom: The selected degrees of freedom represent the flexibility of the spline model in capturing the non-linear relationship between the predictor variable (dis) and the response variable (nox). In this case, a spline with 5 degrees of freedom is deemed to be the most suitable for accurately representing the underlying relationship in the data.
Cross-validated Error: The plot of cross-validated errors against degrees of freedom shows how the model’s performance varies with different degrees of freedom. The cross-validated error tends to decrease initially as the degrees of freedom increase, indicating improved model flexibility. However, after reaching the optimal point (in this case, at 5 degrees of freedom), further increasing the degrees of freedom may lead to overfitting and higher cross-validated errors.
Model Interpretability and Complexity: While increasing the degrees of freedom beyond 5 may result in better fit to the training data, it could lead to a more complex model that may not generalize well to unseen data. Therefore, selecting the optimal degrees of freedom is crucial for achieving a balance between model interpretability and predictive accuracy.
In summary, the selection of 5 degrees of freedom for the regression spline model indicates a model that captures the non-linear relationship between dis and nox effectively while avoiding excessive complexity. This model can be used for prediction and interpretation with confidence in its predictive performance.
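As a final step, the spline can be refit with the CV-selected degrees of freedom and overlaid on the data; a minimal sketch (final_spline is a placeholder name):
# Refit the regression spline with the CV-selected degrees of freedom and plot it
final_spline <- lm(nox ~ bs(dis, df = optimal_df), data = Boston)
plot(Boston$dis, Boston$nox, col = "grey", xlab = "dis", ylab = "nox",
     main = paste("Regression Spline Fit (df =", optimal_df, ")"))
lines(sort(Boston$dis), predict(final_spline, data.frame(dis = sort(Boston$dis))), col = "red", lwd = 2)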
# Set seed for reproducibility
set.seed(123)
# Number of observations
n <- 100
# Generate predictor variables X1 and X2
X1 <- rnorm(n)
X2 <- rnorm(n)
# Generate response variable Y
Y <- 2*X1 + 3*X2 + rnorm(n, mean = 0, sd = 0.5)
# Initialize beta hat for coefficient beta1
beta_hat_1 <- 0.5
# Calculate 'a' as Y - beta1*X1
a <- Y - beta_hat_1 * X1
# Fit simple linear regression with 'a' as response and X2 as predictor
lm_result <- lm(a ~ X2)
# Extract the coefficient beta2
beta2 <- coef(lm_result)[2]
# Calculate 'a' as Y - beta2*X2
a <- Y - beta2 * X2
# Fit simple linear regression with 'a' as response and X1 as predictor
lm_result <- lm(a ~ X1)
# Extract the coefficient beta1
beta1 <- coef(lm_result)[2]
# Initialize vectors to store coefficient estimates
beta0_estimates <- numeric(1000)
beta1_estimates <- numeric(1000)
beta2_estimates <- numeric(1000)
# Initial value for beta_hat_1
beta_hat_1 <- 0.5
# Loop for 1000 iterations
for (i in 1:1000) {
# Fit model with beta_hat_1 fixed
a <- Y - beta_hat_1 * X1
lm_result <- lm(a ~ X2)
beta2 <- coef(lm_result)[2]
# Fit model with beta2 fixed
a <- Y - beta2 * X2
lm_result <- lm(a ~ X1)
beta1 <- coef(lm_result)[2]
# Store coefficient estimates
beta0_estimates[i] <- coef(lm_result)[1]
beta1_estimates[i] <- beta1
beta2_estimates[i] <- beta2
# Print estimates at each iteration
cat("Iteration:", i, "\tBeta0:", coef(lm_result)[1], "\tBeta1:", beta1, "\tBeta2:", beta2, "\n")
}
## Iteration: 1 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 2 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## ... (iterations 3 through 584 repeat the same estimates, Beta0: 0.06064258, Beta1: 1.929897, Beta2: 2.944883; the backfitting updates have already converged after the first iteration)
## Iteration: 585 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 586 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 587 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 588 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 589 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 590 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 591 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 592 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 593 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 594 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 595 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 596 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 597 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 598 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 599 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 600 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 601 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 602 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 603 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 604 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 605 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 606 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 607 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 608 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 609 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 610 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 611 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 612 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 613 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 614 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 615 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 616 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 617 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 618 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 619 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 620 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 621 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 622 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 623 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 624 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 625 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 626 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 627 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 628 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 629 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 630 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 631 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 632 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 633 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 634 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 635 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 636 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 637 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 638 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 639 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 640 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 641 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 642 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 643 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 644 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 645 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 646 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 647 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 648 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 649 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 650 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 651 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 652 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 653 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 654 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 655 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 656 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 657 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 658 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 659 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 660 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 661 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 662 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 663 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 664 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 665 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 666 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 667 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 668 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 669 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 670 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 671 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 672 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 673 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 674 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 675 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 676 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 677 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 678 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 679 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 680 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 681 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 682 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 683 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 684 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 685 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 686 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 687 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 688 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 689 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 690 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 691 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 692 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 693 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 694 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 695 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 696 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 697 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 698 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 699 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 700 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 701 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 702 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 703 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 704 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 705 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 706 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 707 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 708 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 709 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 710 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 711 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 712 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 713 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 714 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 715 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 716 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 717 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 718 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 719 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 720 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 721 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 722 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 723 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 724 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 725 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 726 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 727 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 728 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 729 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 730 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 731 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 732 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 733 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 734 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 735 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 736 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 737 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 738 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 739 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 740 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 741 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 742 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 743 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 744 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 745 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 746 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 747 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 748 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 749 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 750 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 751 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 752 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 753 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 754 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 755 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 756 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 757 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 758 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 759 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 760 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 761 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 762 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 763 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 764 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 765 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 766 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 767 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 768 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 769 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 770 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 771 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 772 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 773 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 774 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 775 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 776 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 777 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 778 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 779 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 780 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 781 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 782 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 783 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 784 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 785 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 786 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 787 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 788 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 789 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 790 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 791 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 792 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 793 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 794 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 795 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 796 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 797 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 798 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 799 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 800 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 801 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 802 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 803 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 804 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 805 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 806 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 807 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 808 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 809 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 810 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 811 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 812 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 813 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 814 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 815 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 816 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 817 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 818 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 819 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 820 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 821 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 822 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 823 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 824 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 825 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 826 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 827 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 828 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 829 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 830 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 831 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 832 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 833 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 834 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 835 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 836 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 837 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 838 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 839 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 840 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 841 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 842 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 843 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 844 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 845 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 846 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 847 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 848 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 849 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 850 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 851 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 852 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 853 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 854 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 855 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 856 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 857 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 858 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 859 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 860 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 861 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 862 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 863 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 864 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 865 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 866 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 867 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 868 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 869 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 870 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 871 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 872 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 873 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 874 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 875 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 876 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 877 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 878 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 879 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 880 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 881 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 882 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 883 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 884 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 885 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 886 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 887 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 888 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 889 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 890 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 891 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 892 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 893 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 894 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 895 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 896 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 897 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 898 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 899 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 900 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 901 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 902 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 903 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 904 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 905 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 906 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 907 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 908 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 909 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 910 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 911 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 912 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 913 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 914 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 915 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 916 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 917 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 918 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 919 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 920 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 921 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 922 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 923 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 924 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 925 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 926 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 927 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 928 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 929 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 930 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 931 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 932 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 933 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 934 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 935 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 936 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 937 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 938 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 939 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 940 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 941 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 942 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 943 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 944 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 945 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 946 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 947 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 948 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 949 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 950 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 951 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 952 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 953 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 954 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 955 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 956 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 957 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 958 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 959 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 960 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 961 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 962 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 963 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 964 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 965 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 966 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 967 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 968 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 969 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 970 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 971 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 972 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 973 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 974 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 975 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 976 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 977 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 978 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 979 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 980 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 981 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 982 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 983 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 984 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 985 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 986 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 987 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 988 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 989 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 990 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 991 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 992 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 993 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 994 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 995 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 996 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 997 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 998 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 999 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
## Iteration: 1000 Beta0: 0.06064258 Beta1: 1.929897 Beta2: 2.944883
# Plotting beta0
plot(1:1000, beta0_estimates, type = "l", col = "red", xlab = "Iteration", ylab = "Coefficient Estimate", main = "Coefficient Estimates Over Iterations - Beta0")
# Plotting beta1
plot(1:1000, beta1_estimates, type = "l", col = "blue", xlab = "Iteration", ylab = "Coefficient Estimate", main = "Coefficient Estimates Over Iterations - Beta1")
# Plotting beta2
plot(1:1000, beta2_estimates, type = "l", col = "green", xlab = "Iteration", ylab = "Coefficient Estimate", main = "Coefficient Estimates Over Iterations - Beta2")
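The three trajectories can also be drawn on a single set of axes, which makes it easier to see that all three estimates flatten out together. This is an optional sketch that only reuses the beta0_estimates, beta1_estimates, and beta2_estimates vectors created above.
# Optional: all three coefficient trajectories on one plot
matplot(1:1000, cbind(beta0_estimates, beta1_estimates, beta2_estimates),
type = "l", lty = 1, col = c("red", "blue", "green"),
xlab = "Iteration", ylab = "Coefficient Estimate",
main = "Coefficient Estimates Over Iterations")
legend("right", legend = c("Beta0", "Beta1", "Beta2"), col = c("red", "blue", "green"), lty = 1)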
# Fit multiple linear regression model
lm_result_multiple <- lm(Y ~ X1 + X2)
# Extract coefficient estimates from the multiple linear regression model
beta0_multiple <- coef(lm_result_multiple)[1]
beta1_multiple <- coef(lm_result_multiple)[2]
beta2_multiple <- coef(lm_result_multiple)[3]
# Plotting beta0 with overlay of multiple linear regression coefficient estimate
plot(1:1000, beta0_estimates, type = "l", col = "red", xlab = "Iteration", ylab = "Coefficient Estimate", main = "Coefficient Estimates Over Iterations - Beta0")
abline(h = beta0_multiple, col = "black")
# Plotting beta1 with overlay of multiple linear regression coefficient estimate
plot(1:1000, beta1_estimates, type = "l", col = "blue", xlab = "Iteration", ylab = "Coefficient Estimate", main = "Coefficient Estimates Over Iterations - Beta1")
abline(h = beta1_multiple, col = "black")
# Plotting beta2 with overlay of multiple linear regression coefficient estimate
plot(1:1000, beta2_estimates, type = "l", col = "green", xlab = "Iteration", ylab = "Coefficient Estimate", main = "Coefficient Estimates Over Iterations - Beta2")
abline(h = beta2_multiple, col = "black")
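As a quick numerical check (not part of the original output), the final backfitting estimates can be tabulated next to the lm() coefficients; given the overlaid horizontal lines above, the two sets of values should agree to several decimal places.
# Optional comparison: final backfitting estimates vs. multiple regression coefficients
comparison <- rbind(
Backfitting = c(beta0_estimates[1000], beta1_estimates[1000], beta2_estimates[1000]),
Multiple_lm = c(beta0_multiple, beta1_multiple, beta2_multiple)
)
colnames(comparison) <- c("Beta0", "Beta1", "Beta2")
print(round(comparison, 6))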
# Define a threshold for the change in coefficient estimates
threshold <- 0.001
# Initialize vectors to store coefficient estimates
beta0_estimates <- numeric(1000)
beta1_estimates <- numeric(1000)
beta2_estimates <- numeric(1000)
# Initial value for beta_hat_1
beta_hat_1 <- 0.5
# Loop for up to 1000 iterations
for (i in 1:1000) {
# Fit model with beta_hat_1 fixed
a <- Y - beta_hat_1 * X1
lm_result <- lm(a ~ X2)
beta2 <- coef(lm_result)[2]
# Fit model with beta2 fixed; update beta_hat_1 so the next iteration uses the new estimate
a <- Y - beta2 * X2
lm_result <- lm(a ~ X1)
beta_hat_1 <- coef(lm_result)[2]
# Store coefficient estimates
beta0_estimates[i] <- coef(lm_result)[1]
beta1_estimates[i] <- beta_hat_1
beta2_estimates[i] <- beta2
# Check convergence
if (i > 1) {
# Calculate change in coefficient estimates
delta_beta0 <- abs(beta0_estimates[i] - beta0_estimates[i - 1])
delta_beta1 <- abs(beta1_estimates[i] - beta1_estimates[i - 1])
delta_beta2 <- abs(beta2_estimates[i] - beta2_estimates[i - 1])
# Check if changes are below threshold
if (delta_beta0 < threshold && delta_beta1 < threshold && delta_beta2 < threshold) {
cat("Convergence achieved after", i, "iterations.\n")
break
}
}
}
## Convergence achieved after 2 iterations.
# Report if convergence was not achieved within the 1000-iteration budget
if (i == 1000 &&
!(delta_beta0 < threshold && delta_beta1 < threshold && delta_beta2 < threshold)) {
cat("Convergence not achieved after 1000 iterations.\n")
}
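For completeness, the stopping iteration and the estimates at that point can be reported; this is a small optional addition that only reuses the loop counter i and the estimate vectors filled in the loop above.
# Optional summary of the early-stopping run
cat("Stopped at iteration", i,
"with Beta0 =", round(beta0_estimates[i], 6),
"Beta1 =", round(beta1_estimates[i], 6),
"Beta2 =", round(beta2_estimates[i], 6), "\n")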