March 23, 2019

Exercise 1

Part One: Load the data.

library(readr)
library(dplyr)
library(stargazer)
datos<- read_csv("C:/Users/ejhar/Desktop/econometria/ej2.csv")
head(datos,n=7)
## # A tibble: 7 x 3
##      X1    X2     Y
##   <dbl> <dbl> <dbl>
## 1  3.92  7298  0.75
## 2  3.61  6855  0.71
## 3  3.32  6636  0.66
## 4  3.07  6506  0.61
## 5  3.06  6450  0.7 
## 6  3.11  6402  0.72
## 7  3.21  6368  0.77

1. Linear regression calculations.

options(scipen = 9999)
modelo_lineal<- lm(formula = Y~X1+X2, data= datos)
summary(modelo_lineal)
## 
## Call:
## lm(formula = Y ~ X1 + X2, data = datos)
## 
## Residuals:
##       Min        1Q    Median        3Q       Max 
## -0.085090 -0.039102 -0.003341  0.030236  0.105692 
## 
## Coefficients:
##                Estimate  Std. Error t value            Pr(>|t|)    
## (Intercept)  1.56449677  0.07939598  19.705 0.00000000000000182 ***
## X1           0.23719747  0.05555937   4.269            0.000313 ***
## X2          -0.00024908  0.00003205  -7.772 0.00000009508790794 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.0533 on 22 degrees of freedom
## Multiple R-squared:  0.8653, Adjusted R-squared:  0.8531 
## F-statistic: 70.66 on 2 and 22 DF,  p-value: 0.000000000265
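
The coefficients reported by summary() can also be reproduced with the closed-form OLS solution \(\hat{\beta} = (X'X)^{-1}X'y\). A minimal sketch, assuming the same datos object is loaded (X, y and beta_hat are illustrative names, not part of the original script):

X <- cbind(1, datos$X1, datos$X2)            # design matrix with an intercept column
y <- datos$Y
beta_hat <- solve(t(X) %*% X) %*% t(X) %*% y # (X'X)^{-1} X'y
beta_hat                                     # should match coef(modelo_lineal)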

1. Multiple regression example.

stargazer(modelo_lineal, 
          title = "Regresión Multiple", 
          type = "text", digits = 8)
## 
## Regresión Multiple
## ===============================================
##                         Dependent variable:    
##                     ---------------------------
##                                  Y             
## -----------------------------------------------
## X1                         0.23719750***       
##                            (0.05555937)        
##                                                
## X2                        -0.00024908***       
##                            (0.00003205)        
##                                                
## Constant                   1.56449700***       
##                            (0.07939598)        
##                                                
## -----------------------------------------------
## Observations                    25             
## R2                          0.86529610         
## Adjusted R2                 0.85305030         
## Residual Std. Error    0.05330222 (df = 22)    
## F Statistic         70.66057000*** (df = 2; 22)
## ===============================================
## Note:               *p<0.1; **p<0.05; ***p<0.01
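
The same table can be written to a file instead of printed to the console. A hedged sketch using stargazer's out argument (the file name is hypothetical):

stargazer(modelo_lineal, 
          title = "Regresión Multiple", 
          type = "latex", digits = 8,
          out = "modelo_lineal.tex")   # hypothetical output file name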

1. Objects inside the linear model.

options(scipen = 9999)
modelo_lineal$coefficients
##   (Intercept)            X1            X2 
##  1.5644967711  0.2371974748 -0.0002490793

Variance-covariance matrix of the parameters \(V[\hat{\beta}]\)

var_covar<-vcov(modelo_lineal)
print(var_covar)
##                  (Intercept)              X1                 X2
## (Intercept)  0.0063037218732  0.000240996434 -0.000000982806321
## X1           0.0002409964344  0.003086843196 -0.000001675537651
## X2          -0.0000009828063 -0.000001675538  0.000000001027106
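
This matrix is \(\hat{\sigma}^2 (X'X)^{-1}\), with \(\hat{\sigma}^2\) the residual variance on the model's residual degrees of freedom. A minimal sketch reconstructing it by hand (X and sigma2_hat are illustrative names):

X <- cbind(1, datos$X1, datos$X2)                    # design matrix with intercept
sigma2_hat <- sum(residuals(modelo_lineal)^2) / df.residual(modelo_lineal)
sigma2_hat * solve(t(X) %*% X)                       # should match vcov(modelo_lineal)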

1. Confidence intervals.

confint(object=modelo_lineal, level = .95)
##                     2.5 %        97.5 %
## (Intercept)  1.3998395835  1.7291539588
## X1           0.1219744012  0.3524205485
## X2          -0.0003155438 -0.0001826148
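
These intervals are the point estimates plus or minus a t quantile times the standard errors. A minimal sketch of that computation (est, se and tc are illustrative names):

est <- coef(modelo_lineal)
se  <- sqrt(diag(vcov(modelo_lineal)))               # standard errors of the coefficients
tc  <- qt(0.975, df.residual(modelo_lineal))         # t quantile for a 95% interval
cbind(lower = est - tc * se, upper = est + tc * se)  # should match confint()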

1. Fitted values \(\hat{Y}\).

plot(modelo_lineal$fitted.values, main = "Valores ajustados", 
     ylab = "Y", xlab = "X")

modelo_lineal$fitted.values %>% as.matrix()
##         [,1]
## 1  0.6765303
## 2  0.7133412
## 3  0.6991023
## 4  0.6721832
## 5  0.6837597
## 6  0.7075753
## 7  0.7397638
## 8  0.7585979
## 9  0.7943078
## 10 0.7935605
## 11 0.7984347
## 12 0.8272778
## 13 0.8021665
## 14 0.7992462
## 15 0.7544349
## 16 0.7339716
## 17 0.7048866
## 18 0.6930338
## 19 0.6350898
## 20 0.6127185
## 21 0.5701215
## 22 0.4796371
## 23 0.4374811
## 24 0.3953981
## 25 0.3773799
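
The fitted values are the design matrix times the estimated coefficients, which is also what predict() returns for the original data. A minimal consistency check (y_hat is an illustrative name):

y_hat <- predict(modelo_lineal, newdata = datos)     # X %*% beta_hat for the sample
all.equal(unname(y_hat), unname(modelo_lineal$fitted.values))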

1. Model residuals \(\hat{e}\).

plot(modelo_lineal$residuals, main = "Residuos", ylab = "residuos", 
     xlab = "casos")

modelo_lineal$residuals %>% matrix()
##               [,1]
##  [1,]  0.073469743
##  [2,] -0.003341163
##  [3,] -0.039102258
##  [4,] -0.062183196
##  [5,]  0.016240338
##  [6,]  0.012424659
##  [7,]  0.030236216
##  [8,] -0.018597878
##  [9,]  0.105692240
## [10,]  0.026439478
## [11,] -0.048434733
## [12,] -0.057277771
## [13,] -0.022166535
## [14,]  0.040753758
## [15,]  0.035565142
## [16,] -0.033971640
## [17,] -0.024886579
## [18,]  0.026966239
## [19,] -0.085089833
## [20,]  0.017281530
## [21,] -0.010121525
## [22,] -0.069637086
## [23,]  0.072518915
## [24,]  0.074601871
## [25,] -0.057379932
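
The residuals are the observed Y minus the fitted values, and with an intercept in the model they sum to (numerically) zero. A minimal check (e_hat is an illustrative name):

e_hat <- datos$Y - modelo_lineal$fitted.values
all.equal(unname(e_hat), unname(modelo_lineal$residuals))
sum(e_hat)                                           # essentially zero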

Exercise 2

2. Load the data.

RutaYX<- read_csv("C:/Users/ejhar/Desktop/trabajo 1/matrizXY.csv")
RutaYX %>% mutate(X3=X1*X2) %>% select("Y","X1","X2","X3") -> datos2
head(datos2, n = 7)
## # A tibble: 7 x 4
##       Y    X1    X2    X3
##   <dbl> <dbl> <dbl> <dbl>
## 1   320    50   7.4 370  
## 2   450    53   5.1 270. 
## 3   370    60   4.2 252  
## 4   470    63   3.9 246. 
## 5   420    69   1.4  96.6
## 6   500    82   2.2 180. 
## 7   570   100   7   700
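
Building X3 = X1 * X2 by hand is equivalent to requesting the interaction directly in the formula: Y ~ X1 * X2 expands to X1 + X2 + X1:X2. A minimal sketch (modelo_interaccion is an illustrative name):

modelo_interaccion <- lm(Y ~ X1 * X2, data = RutaYX)
coef(modelo_interaccion)                             # X1:X2 plays the role of X3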

2. Linear regression calculations.

options(scipen = 9999)
modelo_lineal2<- lm(formula = Y~X1+X2+X3, data= datos2)
summary(modelo_lineal2)
## 
## Call:
## lm(formula = Y ~ X1 + X2 + X3, data = datos2)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -108.527  -37.595   -2.745   52.292  102.808 
## 
## Coefficients:
##              Estimate Std. Error t value Pr(>|t|)    
## (Intercept) 303.50401   71.54695   4.242 0.000621 ***
## X1            2.32927    0.47698   4.883 0.000166 ***
## X2          -25.07113   11.48487  -2.183 0.044283 *  
## X3            0.28617    0.07681   3.726 0.001840 ** 
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 67.68 on 16 degrees of freedom
## Multiple R-squared:  0.9634, Adjusted R-squared:  0.9566 
## F-statistic: 140.4 on 3 and 16 DF,  p-value: 0.00000000001054

2. Multiple regression example.

stargazer(modelo_lineal2, 
          title = "Regresión Multiple", 
          type = "text", digits = 8)
## 
## Regresión Multiple
## ================================================
##                         Dependent variable:     
##                     ----------------------------
##                                  Y              
## ------------------------------------------------
## X1                         2.32927500***        
##                             (0.47698220)        
##                                                 
## X2                         -25.07113000**       
##                            (11.48487000)        
##                                                 
## X3                         0.28616860***        
##                             (0.07681293)        
##                                                 
## Constant                  303.50400000***       
##                            (71.54695000)        
##                                                 
## ------------------------------------------------
## Observations                     20             
## R2                           0.96341370         
## Adjusted R2                  0.95655370         
## Residual Std. Error    67.67775000 (df = 16)    
## F Statistic         140.44060000*** (df = 3; 16)
## ================================================
## Note:                *p<0.1; **p<0.05; ***p<0.01
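
Both fitted models can also be displayed side by side in a single table; a minimal sketch reusing the same stargazer call with two model objects:

stargazer(modelo_lineal, modelo_lineal2, 
          title = "Regresión Multiple", 
          type = "text", digits = 4)                 # one column per model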

2. Objects inside the linear model.

options(scipen = 9999)
modelo_lineal2$coefficients
## (Intercept)          X1          X2          X3 
## 303.5040143   2.3292746 -25.0711288   0.2861686

Variance-covariance matrix of the parameters \(V[\hat{\beta}]\)

var_covar2<-vcov(modelo_lineal2)
print(var_covar2)

2. Confidence intervals.

confint(object=modelo_lineal2, level = .95)
##                   2.5 %      97.5 %
## (Intercept) 151.8312499 455.1767786
## X1            1.3181175   3.3404318
## X2          -49.4179582  -0.7242993
## X3            0.1233324   0.4490047

2. Fitted values \(\hat{Y}\).

plot(modelo_lineal2$fitted.values, main = "Valores ajustados", 
     ylab = "Y", xlab = "X")

modelo_lineal2$fitted.values %>% as.matrix()
##         [,1]
## 1   340.3238
## 2   376.4442
## 3   410.0762
## 4   422.7825
## 5   456.7683
## 6   490.9729
## 7   561.2516
## 8   572.4839
## 9   661.8956
## 10  805.2546
## 11  743.9514
## 12  802.6063
## 13  921.3246
## 14 1038.5268
## 15  966.3846
## 16  967.1923
## 17 1087.4101
## 18 1280.2249
## 19 1349.9604
## 20 1214.1649
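
The same model can produce predictions for points outside the sample with predict(). A minimal sketch, where the X1 and X2 values are hypothetical and X3 is their product, as required by the fitted formula:

nuevo <- data.frame(X1 = 75, X2 = 5)                 # hypothetical values for illustration
nuevo$X3 <- nuevo$X1 * nuevo$X2                      # interaction term used by the model
predict(modelo_lineal2, newdata = nuevo)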

2. Model residuals \(\hat{e}\).

plot(modelo_lineal2$residuals, main = "Residuos", ylab = "residuos",
     xlab = "casos")

modelo_lineal2$residuals %>% matrix()
##              [,1]
##  [1,]  -20.323767
##  [2,]   73.555820
##  [3,]  -40.076233
##  [4,]   47.217467
##  [5,]  -36.768268
##  [6,]    9.027138
##  [7,]    8.748419
##  [8,]   67.516125
##  [9,]    8.104393
## [10,]  -25.254613
## [11,]  -53.951414
## [12,] -102.606335
## [13,]  -11.324647
## [14,] -108.526815
## [15,]  -26.384626
## [16,]  102.807683
## [17,]   72.589856
## [18,]  -70.224936
## [19,]  100.039646
## [20,]    5.835106
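
The R-squared reported by summary() can be recovered from these residuals as 1 - RSS/TSS. A minimal sketch (rss and tss are illustrative names):

rss <- sum(modelo_lineal2$residuals^2)               # residual sum of squares
tss <- sum((datos2$Y - mean(datos2$Y))^2)            # total sum of squares
1 - rss / tss                                        # should be close to 0.9634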