# Import data set
dataset <- read.csv("G:\\RStudio\\udemy\\ml\\Machine Learning AZ\\Part 2 - Regression\\Section 8 - Decision Tree Regression\\Decision_Tree_Regression\\Position_Salaries.csv")
# We want to use only columns 2 and 3 (Level and Salary)
dataset <- dataset[2:3]
head(dataset)
# Decision Tree Regression
# check for missing values
sum(is.na(dataset$Level))
[1] 0
sum(is.na(dataset$Salary))
[1] 0

There are no missing values in the dataset.

While the next step would normally be to split the data into training and test sets, this dataset is so small that we will train on the full dataset.
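For reference, if we did split, a minimal sketch using the caTools package might look like the following (the 0.8 split ratio and the seed are illustrative assumptions, not part of the original workflow):

# install.packages("caTools")
library(caTools)
set.seed(123)                                   # illustrative seed
split <- sample.split(dataset$Salary, SplitRatio = 0.8)
training_set <- subset(dataset, split == TRUE)
test_set <- subset(dataset, split == FALSE)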

# feature scaling
# not needed here: the decision tree splits on raw thresholds of Level, so scaling has no effect
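For reference only, if a later model did require scaling, a minimal sketch with base R's scale() could look like this (it is not applied anywhere in this notebook):

# illustrative only: standardize the Level feature
dataset_scaled <- dataset
dataset_scaled$Level <- scale(dataset_scaled$Level)
head(dataset_scaled)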
# Fitting the decision tree regression model to the whole dataset
# install.packages("rpart")
library(rpart)
package ‘rpart’ was built under R version 3.3.3
regressor <- rpart(formula = Salary ~ ., data = dataset)
summary(regressor)
Call:
rpart(formula = Salary ~ ., data = dataset)
  n= 10 

    CP nsplit rel error xerror xstd
1 0.01      0         1      0    0

Node number 1: 10 observations
  mean=249500, MSE=8.066225e+10 
# predicting a new result
# with the default rpart.control (minsplit = 20) and only 10 observations, the tree
# never splits, so it predicts the overall mean salary for any Level
y_pred <- predict(regressor, data.frame(Level = 6.5))
y_pred
[1] 249500
# Plot
library(ggplot2)
ggplot() +
  geom_point(aes(x = dataset$Level, y = dataset$Salary), colour = "red") +
  geom_line(aes(x = dataset$Level, y = predict(regressor, newdata = dataset)), colour = "blue") +
  ggtitle("Truth or Bluff (Regression Model)") +
  xlab("Levels") +
  ylab("Salary")

# Refitting the decision tree with minsplit = 1 so the tree is allowed to split this small dataset
regressor <- rpart(formula = Salary ~ ., data = dataset, control = rpart.control(minsplit = 1))
summary(regressor)
Call:
rpart(formula = Salary ~ ., data = dataset, control = rpart.control(minsplit = 1))
  n= 10 

          CP nsplit  rel error   xerror      xstd
1 0.77638626      0 1.00000000 1.234568 0.7835133
2 0.15496716      1 0.22361374 1.148378 0.7931845
3 0.05217357      2 0.06864658 1.120316 0.7968327
4 0.01000000      3 0.01647301 1.120316 0.7968327

Variable importance
Level 
  100 

Node number 1: 10 observations,    complexity param=0.7763863
  mean=249500, MSE=8.066225e+10 
  left son=2 (8 obs) right son=3 (2 obs)
  Primary splits:
      Level < 8.5 to the left,  improve=0.7763863, (0 missing)

Node number 2: 8 observations,    complexity param=0.05217357
  mean=124375, MSE=6.921484e+09 
  left son=4 (6 obs) right son=5 (2 obs)
  Primary splits:
      Level < 6.5 to the left,  improve=0.7600316, (0 missing)

Node number 3: 2 observations,    complexity param=0.1549672
  mean=750000, MSE=6.25e+10 
  left son=6 (1 obs) right son=7 (1 obs)
  Primary splits:
      Level < 9.5 to the left,  improve=1, (0 missing)

Node number 4: 6 observations
  mean=82500, MSE=1.38125e+09 

Node number 5: 2 observations
  mean=250000, MSE=2.5e+09 

Node number 6: 1 observations
  mean=500000, MSE=0 

Node number 7: 1 observations
  mean=1000000, MSE=0 
# predicting a new result
# Level = 6.5 now falls in the leaf containing levels 7 and 8, so the tree predicts
# their mean salary
y_pred <- predict(regressor, data.frame(Level = 6.5))
y_pred
     1 
250000 
library(ggplot2)
ggplot() +
  geom_point(aes(x = dataset$Level, y = dataset$Salary), colour = "red") +
  geom_line(aes(x = dataset$Level, y = predict(regressor, newdata = dataset)), colour = "blue") +
  ggtitle("Truth or Bluff (Regression Model)") +
  xlab("Levels") +
  ylab("Salary")

# visualizing in higher resolution: a fine grid of Level values shows the tree's step-function predictions
x_grid <- seq(min(dataset$Level), max(dataset$Level), 0.1)
ggplot() +
  geom_point(aes(x = dataset$Level, y = dataset$Salary), colour = "red") +
  geom_line(aes(x = x_grid, y = predict(regressor, newdata = data.frame(Level = x_grid))), colour = "blue") +
  ggtitle("Truth or Bluff (Regression Model)") +
  xlab("Levels") +
  ylab("Salary")
