library(quantmod)
## Loading required package: xts
## Loading required package: zoo
## 
## Attaching package: 'zoo'
## 
## The following objects are masked from 'package:base':
## 
##     as.Date, as.Date.numeric
## 
## Loading required package: TTR
## Version 0.4-0 included new data defaults. See ?getSymbols.
library(caret)
## Loading required package: lattice
## Loading required package: ggplot2
startDate = as.Date("2010-01-01")

endDate = as.Date("2015-12-31") 


getSymbols("DJIA", src = "yahoo", from = startDate, to = endDate) 
##     As of 0.4-0, 'getSymbols' uses env=parent.frame() and
##  auto.assign=TRUE by default.
## 
##  This  behavior  will be  phased out in 0.5-0  when the call  will
##  default to use auto.assign=FALSE. getOption("getSymbols.env") and 
##  getOptions("getSymbols.auto.assign") are now checked for alternate defaults
## 
##  This message is shown once per session and may be disabled by setting 
##  options("getSymbols.warning4.0"=FALSE). See ?getSymbols for more details.
## [1] "DJIA"
RSI3<-RSI(Op(DJIA), n = 3) 
#Calculate a 3-period relative strength index (RSI) from the open price

EMA5<-EMA(Op(DJIA),n=5) 
#Calculate a 5-period exponential moving average (EMA)
EMAcross<- Op(DJIA)-EMA5 
#Let’s explore the difference between the open price and our 5-period EMA


DEMA10<-DEMA(Cl(DJIA),n = 10, v = 1, wilder = FALSE)
#Calculate a 10-period double exponential moving average (DEMA) of the close price
DEMA10c<-Cl(DJIA) - DEMA10
#Difference between the close price and the 10-period DEMA

MACD<-MACD(Op(DJIA),fast = 12, slow = 26, signal = 9) 
#Calculate a MACD with standard parameters

MACDsignal<-MACD[,2] 
#Grab just the signal line (column 2, "signal") to use as our indicator


SMI<-SMI(Op(DJIA),n=13,slow=25,fast=2,signal=9) 
#Stochastic Momentum Index (SMI) with standard parameters
SMI<-SMI[,1] 
#Grab just the oscillator (column 1, "SMI") to use as our indicator

BB<-BBands(Op(DJIA),n=20,sd=2)
#20-period Bollinger Bands of the open price, 2 standard deviations wide
BBp<-BB[,4]
#Grab just %B (column 4, "pctB"), the position of the open within the bands


CCI20<-CCI(HLC(DJIA),n=20)
#A 20-period Commodity Channel Index calculated from the High/Low/Close of our data



# Create the target variable: the sign of the daily return

ClosingPrice<-Cl(DJIA)

Trend<-diff(ClosingPrice, lag = 1, differences = 1, arithmetic = TRUE, log = FALSE, na.pad = TRUE)

 
#Calculate the difference between the close price at time T and the close price at T-1
Class<-ifelse(Trend>0,"UP","DOWN") 
#Create a binary classification variable, the variable we are trying to predict.
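#A quick illustrative check (not part of the original analysis): the first value of
#Trend is NA because there is no prior close, so Class also starts with an NA.
table(as.vector(Class), useNA = "ifany")   #rough balance of UP vs DOWN days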

DJIADF<-data.frame(date = index(DJIA),DJIA, row.names=NULL)
#Convert the DJIA xts object to a data frame with an explicit date column for merging
library(wikipediatrend)


views1<-wp_trend(page ="The Home Depot" ,from =startDate ,to =endDate ,lang = "en",friendly = TRUE,requestFrom = "wp.trend.tester at wptt.wptt",userAgent = TRUE)
## Option 'requestFrom' is deprecated and will cause errors 
##             in futuere versions of the wp_trend() function. Please read 
##             the package vignette and/or README to learn about the new
##             set of options.
##             
##             Check wp_http_header() to know which information are send to 
##             stats.grok.se (R and package versions)
##             
## Option 'friendly' is deprecated and will cause errors 
##             in futuere versions of the wp_trend() function. Please read 
##             the package vignette and/or README to learn about the new
##             set of options.
##             
##             The package now is friendly by default.
##             
## Option 'userAgent' is deprecated and will cause errors 
##             in futuere versions of the wp_trend() function. Please read 
##             the package vignette and/or README to learn about the new
##             set of options.
##             
##             Check wp_http_header() to know which information are send to 
##             stats.grok.se (R and package versions)
##             
## http://stats.grok.se/json/en/201001/The_Home%20Depot
## ...
## http://stats.grok.se/json/en/201512/The_Home%20Depot
views2<-wp_trend(page ="ExxonMobil" ,from =startDate ,to =endDate ,lang = "en",friendly = TRUE,requestFrom = "wp.trend.tester at wptt.wptt",userAgent = TRUE)
## http://stats.grok.se/json/en/201001/ExxonMobil
## ...
## http://stats.grok.se/json/en/201512/ExxonMobil
views3<-wp_trend(page ="DuPont" ,from =startDate ,to =endDate ,lang = "en",friendly = TRUE,requestFrom = "wp.trend.tester at wptt.wptt",userAgent = TRUE)
## http://stats.grok.se/json/en/201001/DuPont
## ...
## http://stats.grok.se/json/en/201512/DuPont
views4<-wp_trend(page ="3M" ,from =startDate ,to =endDate ,lang = "en",friendly = TRUE,requestFrom = "wp.trend.tester at wptt.wptt",userAgent = TRUE)
## http://stats.grok.se/json/en/201001/3M
## ...
## http://stats.grok.se/json/en/201512/3M
views6<-wp_trend(page ="Procter & Gamble" ,from =startDate ,to =endDate ,lang = "en",friendly = TRUE,requestFrom = "wp.trend.tester at wptt.wptt",userAgent = TRUE)
## http://stats.grok.se/json/en/201001/Procter_&%20Gamble
## ...
## http://stats.grok.se/json/en/201512/Procter_&%20Gamble
views7<-wp_trend(page ="Goldman Sachs" ,from =startDate ,to =endDate ,lang = "en",friendly = TRUE,requestFrom = "wp.trend.tester at wptt.wptt",userAgent = TRUE)
## http://stats.grok.se/json/en/201001/Goldman_Sachs
## ...
## http://stats.grok.se/json/en/201512/Goldman_Sachs
views8<-wp_trend(page ="Cisco Systems" ,from =startDate ,to =endDate ,lang = "en",friendly = TRUE,requestFrom = "wp.trend.tester at wptt.wptt",userAgent = TRUE)
## http://stats.grok.se/json/en/201001/Cisco_Systems
## ...
## http://stats.grok.se/json/en/201512/Cisco_Systems
views9<-wp_trend(page ="Pfizer" ,from =startDate ,to =endDate ,lang = "en",friendly = TRUE,requestFrom = "wp.trend.tester at wptt.wptt",userAgent = TRUE)
## http://stats.grok.se/json/en/201001/Pfizer
## ...
## http://stats.grok.se/json/en/201512/Pfizer
views10<-wp_trend(page ="UnitedHealth Group" ,from =startDate ,to =endDate ,lang = "en",friendly = TRUE,requestFrom = "wp.trend.tester at wptt.wptt",userAgent = TRUE)
## http://stats.grok.se/json/en/201001/UnitedHealth_Group
## ...
## http://stats.grok.se/json/en/201512/UnitedHealth_Group
views11<-wp_trend(page ="IBM" ,from =startDate ,to =endDate ,lang = "en",friendly = TRUE,requestFrom = "wp.trend.tester at wptt.wptt",userAgent = TRUE)
## http://stats.grok.se/json/en/201001/IBM
## ...
## http://stats.grok.se/json/en/201512/IBM
views5<-wp_trend(page ="McDonald's" ,from =startDate ,to =endDate ,lang = "en",friendly = TRUE,requestFrom = "wp.trend.tester at wptt.wptt",userAgent = TRUE)
## http://stats.grok.se/json/en/201001/McDonald's
## ...
## http://stats.grok.se/json/en/201512/McDonald's
viewdf1<-cbind(views1[,1:2],views2[,2],views3[,2],views4[,2],views5[,2],views6[,2],views7[,2],views8[,2],views9[,2],views10[,2],views11[,2])
#Combine the daily view counts for all eleven pages into one data frame (column 1 is the date)
CombDF1<-merge(viewdf1,DJIADF, by.x='date', by.y='date')
#Merge the view counts with the DJIA prices by date, keeping only days present in both


DataSet<-data.frame(RSI3,EMAcross,MACDsignal,SMI,BBp,CCI20,DEMA10c) 
#Combine the technical indicators into one data frame

DataSet<-DataSet[-c(1:33),] 
#Drop the first 33 rows, where the longer-lookback indicators are still NA

Alldata<-cbind(DataSet,CombDF1[33:1509,2:12])
#Append the eleven Wikipedia view-count columns (rows 33-1509 of the merged data)

Normalized <-function(x) {(x-min(x))/(max(x)-min(x))}
#Min-max scaler: rescale a column to the [0,1] range
NormalizedData<-as.data.frame(lapply(Alldata,Normalized))
#Apply the scaler to every predictor column
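As a quick illustration of the scaler (toy numbers, not from the data), the smallest value maps to 0 and the largest to 1. Note that min and max are taken over the full 2010-2015 sample, so later rows are scaled using information from the whole period.

Normalized(c(2, 4, 6, 10))
#Returns 0.00 0.25 0.50 1.00 for this made-up vector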

ClassDF<-data.frame(date = index(Class), Class, row.names=NULL)
#Turn the UP/DOWN class series into a data frame with a date column


AlldataNormalized<-data.frame(NormalizedData,ClassDF[33:1509,2])
#Attach the class labels for the same rows to the normalized predictors




colnames(AlldataNormalized)<-c("RSI3","EMAcross","MACDsignal","SMI","BBp","CCI20","DEMA10c","Views1","Views2","Views3","Views4","Views5","Views6","Views7","Views8","Views9","Views10","Views11","Class") 
#Give the 18 predictors and the class readable column names

TrainingSet<-AlldataNormalized[1:1000,] 
#Use the first 1,000 trading days for training

TestSet<-AlldataNormalized[1001:1477,]
#Hold out the remaining 477 trading days for testing

TrainClass<-TrainingSet[,19] 
#Class labels of the training set
TrainPred<-TrainingSet[,-19] 
#Predictor columns of the training set

TestClass<-TestSet[,19] 
#Class labels of the test set
TestPred<-TestSet[,-19] 
#Predictor columns of the test set
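Because the split is chronological rather than random, it can be worth confirming that both classes still appear in reasonable proportions in each period; a small illustrative check:

table(TrainClass)
#UP/DOWN counts in the first 1,000 trading days
table(TestClass)
#UP/DOWN counts in the 477 held-out trading days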
library(h2o)
## Loading required package: statmod
## 
## ----------------------------------------------------------------------
## 
## Your next step is to start H2O:
##     > h2o.init()
## 
## For H2O package documentation, ask for help:
##     > ??h2o
## 
## After starting H2O, you can use the Web UI at http://localhost:54321
## For more information visit http://docs.h2o.ai
## 
## ----------------------------------------------------------------------
## 
## 
## Attaching package: 'h2o'
## 
## The following objects are masked from 'package:stats':
## 
##     sd, var
## 
## The following objects are masked from 'package:base':
## 
##     %*%, apply, as.factor, as.numeric, colnames, colnames<-,
##     ifelse, %in%, is.factor, is.numeric, log, trunc
localH2O <- h2o.init(ip = "localhost", port = 54321, startH2O = TRUE)
## 
## H2O is not running yet, starting it now...
## 
## Note:  In case of errors look at the following log files:
##     /tmp/RtmptyRMPQ/h2o_mitra2_started_from_r.out
##     /tmp/RtmptyRMPQ/h2o_mitra2_started_from_r.err
## 
## 
## ...Successfully connected to http://localhost:54321/ 
## 
## R is connected to the H2O cluster: 
##     H2O cluster uptime:         2 seconds 450 milliseconds 
##     H2O cluster version:        3.6.0.8 
##     H2O cluster name:           H2O_started_from_R_mitra2_nwe867 
##     H2O cluster total nodes:    1 
##     H2O cluster total memory:   0.66 GB 
##     H2O cluster total cores:    4 
##     H2O cluster allowed cores:  2 
##     H2O cluster healthy:        TRUE 
## 
## Note:  As started, H2O is limited to the CRAN default of 2 CPUs.
##        Shut down and restart H2O as shown below to use all your CPUs.
##            > h2o.shutdown()
##            > h2o.init(nthreads = -1)
localH2O = h2o.init(ip = "localhost", port = 54321, startH2O = TRUE, 
                    Xmx = '2g')
## Warning in h2o.init(ip = "localhost", port = 54321, startH2O = TRUE, Xmx =
## "2g"): Xmx is a deprecated parameter. Use `max_mem_size` and `min_mem_size`
## to set the memory boundaries. Using `Xmx` to set these.
## Successfully connected to http://localhost:54321/ 
## 
## R is connected to the H2O cluster: 
##     H2O cluster uptime:         2 seconds 598 milliseconds 
##     H2O cluster version:        3.6.0.8 
##     H2O cluster name:           H2O_started_from_R_mitra2_nwe867 
##     H2O cluster total nodes:    1 
##     H2O cluster total memory:   0.66 GB 
##     H2O cluster total cores:    4 
##     H2O cluster allowed cores:  2 
##     H2O cluster healthy:        TRUE
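As the warning above says, Xmx is deprecated; on current h2o releases the same memory limit would be set with max_mem_size, roughly as follows (a sketch, not run here):

# localH2O <- h2o.init(ip = "localhost", port = 54321, max_mem_size = "2g")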
TrainH2o<-as.h2o(TrainingSet, destination_frame = "TrainH2o")
## 
  |                                                                       
  |=================================================================| 100%
TestH2o<-as.h2o(TestPred, destination_frame = "TestH2o")
## 
  |                                                                       
  |=================================================================| 100%
model <- h2o.deeplearning(x = 1:18,y = 19,training_frame = TrainH2o, activation = "MaxoutWithDropout",hidden = c(200,200),epochs = 200,rate_decay =5e-4, l1=1e-5)
## 
  |                                                                       
  |=================================================================| 100%
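Before scoring the held-out data it can be useful to glance at the model's own training metrics; a brief sketch, assuming h2o.auc and h2o.confusionMatrix on a fitted model return its training-frame metrics in this h2o release:

h2o.auc(model)
#AUC on the training frame
h2o.confusionMatrix(model)
#Training confusion matrix at the F1-optimal threshold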
h2o_yhat_test <- h2o.predict(model,TestH2o)
df_yhat_test <- as.data.frame(h2o_yhat_test)

prediction <-df_yhat_test[,1] 

confusionMatrix(prediction,TestClass)
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction DOWN  UP
##       DOWN  169  55
##       UP     57 196
##                                           
##                Accuracy : 0.7652          
##                  95% CI : (0.7245, 0.8025)
##     No Information Rate : 0.5262          
##     P-Value [Acc > NIR] : <2e-16          
##                                           
##                   Kappa : 0.5289          
##  Mcnemar's Test P-Value : 0.9247          
##                                           
##             Sensitivity : 0.7478          
##             Specificity : 0.7809          
##          Pos Pred Value : 0.7545          
##          Neg Pred Value : 0.7747          
##              Prevalence : 0.4738          
##          Detection Rate : 0.3543          
##    Detection Prevalence : 0.4696          
##       Balanced Accuracy : 0.7643          
##                                           
##        'Positive' Class : DOWN            
## 
hyper_params <- list(
  hidden=list(c(200,200), c(100,300,100), c(500,500,500),c(100,100,150)),
  input_dropout_ratio=c(0,0.05),
  rate=c(0.01,0.02),
  rate_annealing=c(1e-8,1e-7,1e-6)
)
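The grid search below builds one model for every combination of these values (a full Cartesian product), so the expected number of models can be checked up front:

prod(sapply(hyper_params, length))
#4 hidden layouts x 2 dropout ratios x 2 learning rates x 3 annealing values = 48 models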


grid <- h2o.grid( "deeplearning", model_id="dl_grid", training_frame=TrainH2o, x=1:18,y=19,
  epochs=100,
  stopping_metric="misclassification",
  stopping_tolerance=1e-2,        ## stop when the misclassification rate does not improve by >=1% for 2 scoring events
  stopping_rounds=2,
  score_validation_samples=10000, ## downsample validation set for faster scoring
  score_duty_cycle=0.025,         ## don't score more than 2.5% of the wall time
  adaptive_rate=F,                ## manually tuned learning rate
  momentum_start=0.5,             ## manually tuned momentum
  momentum_stable=0.9, 
  momentum_ramp=1e7, 
  l1=1e-5,
  l2=1e-5,
  activation=c("TanhWithDropout"),
  max_w2=10,                      ## bound on each neuron's squared incoming weights (mainly helps Rectifier stability)
  hyper_params=hyper_params
)
## 
  |                                                                       
  |=================================================================| 100%
model_grid <- h2o.grid("deeplearning",
  hyper_params = hyper_params,
  x = 1:18, y = 19,
  training_frame = TrainH2o,
  distribution = "multinomial",
  activation = "TanhWithDropout")
#Run the same hyper-parameter grid again, this time with h2o's default adaptive learning rate
## 
  |                                                                       
  |=================================================================| 100%
summary(grid)
## H2O Grid Details
## ================
## 
## Grid ID: Grid_DeepLearning_TrainH2o_model_R_1457905851217_7 
## Used hyper parameters: 
##   -  input_dropout_ratio 
##   -  rate 
##   -  hidden 
##   -  rate_annealing 
## Number of models: 48 
## Number of failed models: 0 
## 
## Generated models
## ----------------
##  input_dropout_ratio rate        hidden rate_annealing status_ok
##                 0.05 0.02     [200,200]          1e-07        OK
##                 0.00 0.01 [500,500,500]          1e-07        OK
##                 0.05 0.02 [500,500,500]          1e-07        OK
##                 0.00 0.01 [500,500,500]          1e-08        OK
##                 0.05 0.02 [100,100,150]          1e-08        OK
##                 0.05 0.02 [500,500,500]          1e-06        OK
##                 0.05 0.01     [200,200]          1e-06        OK
##                 0.00 0.02 [100,300,100]          1e-06        OK
##                 0.00 0.01 [100,100,150]          1e-08        OK
##                 0.00 0.02 [100,300,100]          1e-07        OK
##                 0.00 0.02     [200,200]          1e-07        OK
##                 0.05 0.02 [500,500,500]          1e-08        OK
##                 0.00 0.02 [100,100,150]          1e-06        OK
##                 0.00 0.01 [100,300,100]          1e-08        OK
##                 0.00 0.01     [200,200]          1e-07        OK
##                 0.05 0.01 [500,500,500]          1e-07        OK
##                 0.05 0.02 [100,300,100]          1e-07        OK
##                 0.05 0.01     [200,200]          1e-08        OK
##                 0.05 0.01 [500,500,500]          1e-08        OK
##                 0.00 0.01 [100,300,100]          1e-06        OK
##                 0.05 0.02 [100,300,100]          1e-06        OK
##                 0.00 0.01 [100,100,150]          1e-06        OK
##                 0.05 0.01 [100,100,150]          1e-06        OK
##                 0.05 0.02 [100,100,150]          1e-07        OK
##                 0.00 0.01     [200,200]          1e-06        OK
##                 0.00 0.02 [500,500,500]          1e-06        OK
##                 0.05 0.01 [500,500,500]          1e-06        OK
##                 0.00 0.02 [500,500,500]          1e-08        OK
##                 0.00 0.02     [200,200]          1e-06        OK
##                 0.05 0.02 [100,100,150]          1e-06        OK
##                 0.00 0.01 [100,100,150]          1e-07        OK
##                 0.00 0.02     [200,200]          1e-08        OK
##                 0.05 0.02 [100,300,100]          1e-08        OK
##                 0.05 0.01 [100,100,150]          1e-07        OK
##                 0.05 0.01 [100,300,100]          1e-07        OK
##                 0.05 0.02     [200,200]          1e-08        OK
##                 0.00 0.01 [100,300,100]          1e-07        OK
##                 0.00 0.02 [100,300,100]          1e-08        OK
##                 0.00 0.01 [500,500,500]          1e-06        OK
##                 0.05 0.01 [100,300,100]          1e-08        OK
##                 0.00 0.02 [100,100,150]          1e-07        OK
##                 0.00 0.01     [200,200]          1e-08        OK
##                 0.05 0.01     [200,200]          1e-07        OK
##                 0.00 0.02 [100,100,150]          1e-08        OK
##                 0.00 0.02 [500,500,500]          1e-07        OK
##                 0.05 0.01 [100,300,100]          1e-06        OK
##                 0.05 0.01 [100,100,150]          1e-08        OK
##                 0.05 0.02     [200,200]          1e-06        OK
##                         model_ids
##  dl_grid_model_1457905851217_8_19
##  dl_grid_model_1457905851217_8_24
##  dl_grid_model_1457905851217_8_27
##   dl_grid_model_1457905851217_8_8
##  dl_grid_model_1457905851217_8_15
##  dl_grid_model_1457905851217_8_43
##  dl_grid_model_1457905851217_8_33
##  dl_grid_model_1457905851217_8_38
##  dl_grid_model_1457905851217_8_12
##  dl_grid_model_1457905851217_8_22
##  dl_grid_model_1457905851217_8_18
##  dl_grid_model_1457905851217_8_11
##  dl_grid_model_1457905851217_8_46
##   dl_grid_model_1457905851217_8_4
##  dl_grid_model_1457905851217_8_16
##  dl_grid_model_1457905851217_8_25
##  dl_grid_model_1457905851217_8_23
##   dl_grid_model_1457905851217_8_1
##   dl_grid_model_1457905851217_8_9
##  dl_grid_model_1457905851217_8_36
##  dl_grid_model_1457905851217_8_39
##  dl_grid_model_1457905851217_8_44
##  dl_grid_model_1457905851217_8_45
##  dl_grid_model_1457905851217_8_31
##  dl_grid_model_1457905851217_8_32
##  dl_grid_model_1457905851217_8_42
##  dl_grid_model_1457905851217_8_41
##  dl_grid_model_1457905851217_8_10
##  dl_grid_model_1457905851217_8_34
##  dl_grid_model_1457905851217_8_47
##  dl_grid_model_1457905851217_8_28
##   dl_grid_model_1457905851217_8_2
##   dl_grid_model_1457905851217_8_7
##  dl_grid_model_1457905851217_8_29
##  dl_grid_model_1457905851217_8_21
##   dl_grid_model_1457905851217_8_3
##  dl_grid_model_1457905851217_8_20
##   dl_grid_model_1457905851217_8_6
##  dl_grid_model_1457905851217_8_40
##   dl_grid_model_1457905851217_8_5
##  dl_grid_model_1457905851217_8_30
##   dl_grid_model_1457905851217_8_0
##  dl_grid_model_1457905851217_8_17
##  dl_grid_model_1457905851217_8_14
##  dl_grid_model_1457905851217_8_26
##  dl_grid_model_1457905851217_8_37
##  dl_grid_model_1457905851217_8_13
##  dl_grid_model_1457905851217_8_35
## H2O Grid Summary
## ================
## 
## Grid ID: Grid_DeepLearning_TrainH2o_model_R_1457905851217_7 
## Used hyper parameters: 
##   -  input_dropout_ratio 
##   -  rate 
##   -  hidden 
##   -  rate_annealing 
## Number of models: 48 
##   -  dl_grid_model_1457905851217_8_19 
##   -  dl_grid_model_1457905851217_8_24 
##   -  dl_grid_model_1457905851217_8_27 
##   -  dl_grid_model_1457905851217_8_8 
##   -  dl_grid_model_1457905851217_8_15 
##   -  dl_grid_model_1457905851217_8_43 
##   -  dl_grid_model_1457905851217_8_33 
##   -  dl_grid_model_1457905851217_8_38 
##   -  dl_grid_model_1457905851217_8_12 
##   -  dl_grid_model_1457905851217_8_22 
##   -  dl_grid_model_1457905851217_8_18 
##   -  dl_grid_model_1457905851217_8_11 
##   -  dl_grid_model_1457905851217_8_46 
##   -  dl_grid_model_1457905851217_8_4 
##   -  dl_grid_model_1457905851217_8_16 
##   -  dl_grid_model_1457905851217_8_25 
##   -  dl_grid_model_1457905851217_8_23 
##   -  dl_grid_model_1457905851217_8_1 
##   -  dl_grid_model_1457905851217_8_9 
##   -  dl_grid_model_1457905851217_8_36 
##   -  dl_grid_model_1457905851217_8_39 
##   -  dl_grid_model_1457905851217_8_44 
##   -  dl_grid_model_1457905851217_8_45 
##   -  dl_grid_model_1457905851217_8_31 
##   -  dl_grid_model_1457905851217_8_32 
##   -  dl_grid_model_1457905851217_8_42 
##   -  dl_grid_model_1457905851217_8_41 
##   -  dl_grid_model_1457905851217_8_10 
##   -  dl_grid_model_1457905851217_8_34 
##   -  dl_grid_model_1457905851217_8_47 
##   -  dl_grid_model_1457905851217_8_28 
##   -  dl_grid_model_1457905851217_8_2 
##   -  dl_grid_model_1457905851217_8_7 
##   -  dl_grid_model_1457905851217_8_29 
##   -  dl_grid_model_1457905851217_8_21 
##   -  dl_grid_model_1457905851217_8_3 
##   -  dl_grid_model_1457905851217_8_20 
##   -  dl_grid_model_1457905851217_8_6 
##   -  dl_grid_model_1457905851217_8_40 
##   -  dl_grid_model_1457905851217_8_5 
##   -  dl_grid_model_1457905851217_8_30 
##   -  dl_grid_model_1457905851217_8_0 
##   -  dl_grid_model_1457905851217_8_17 
##   -  dl_grid_model_1457905851217_8_14 
##   -  dl_grid_model_1457905851217_8_26 
##   -  dl_grid_model_1457905851217_8_37 
##   -  dl_grid_model_1457905851217_8_13 
##   -  dl_grid_model_1457905851217_8_35 
## 
## Number of failed models: 0
model_ids <- grid@model_ids
models <- lapply(model_ids, function(id) { h2o.getModel(id)})
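One simple way to choose among the grid's models is to rank them by a metric. A sketch using the training AUC (a proper validation frame would be a sounder basis; h2o.auc on a fitted model is assumed to return its training AUC here):

train_auc <- sapply(models, h2o.auc)
#Training AUC of each grid model
best_model <- models[[which.max(train_auc)]]
best_model@model_id
#Identifier of the model with the highest training AUC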

models 
## [[1]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_19 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 44,402 weights/biases, 355.3 KB, 49,726 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  5.00 %                                     
## 2     2   200 TanhDropout 50.00 % 0.000010 0.000010  0.019901 0.000000
## 3     3   200 TanhDropout 50.00 % 0.000010 0.000010  0.019901 0.000000
## 4     4     2     Softmax         0.000010 0.000010  0.019901 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.501989   -0.001241   0.249808  0.012369 0.121621
## 3 0.501989    0.000101   0.067576 -0.002646 0.037190
## 4 0.501989    0.013619   0.171743  0.002063 0.264019
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1626578
## R^2:  0.344291
## LogLoss:  0.5367067
## AUC:  0.8648313
## Gini:  0.7296625
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    348 108 0.236842   =108/456
## UP       97 447 0.178309    =97/544
## Totals  445 555 0.205000  =205/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.624929 0.813467 185
## 2                     max f2  0.084093 0.883959 334
## 3               max f0point5  0.927394 0.822180  76
## 4               max accuracy  0.639873 0.795000 183
## 5              max precision  0.993634 1.000000   0
## 6           max absolute_MCC  0.639873 0.586236 183
## 7 max min_per_class_accuracy  0.719337 0.789474 161
## 
## 
## 
## [[2]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_24 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 511,502 weights/biases, 3.9 MB, 32,934 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  0.00 %                                     
## 2     2   500 TanhDropout 50.00 % 0.000010 0.000010  0.009967 0.000000
## 3     3   500 TanhDropout 50.00 % 0.000010 0.000010  0.009967 0.000000
## 4     4   500 TanhDropout 50.00 % 0.000010 0.000010  0.009967 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.009967 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.501317   -0.000091   0.112018  0.002275 0.026177
## 3 0.501317   -0.000130   0.045342 -0.000408 0.010156
## 4 0.501317   -0.000146   0.045012  0.000217 0.006742
## 5 0.501317   -0.003293   0.103511 -0.000000 0.133594
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1483592
## R^2:  0.4019318
## LogLoss:  0.4646102
## AUC:  0.8702371
## Gini:  0.7404742
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    332 124 0.271930   =124/456
## UP       78 466 0.143382    =78/544
## Totals  410 590 0.202000  =202/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.477978 0.821869 222
## 2                     max f2  0.173218 0.886403 309
## 3               max f0point5  0.790988 0.831911 134
## 4               max accuracy  0.602110 0.799000 192
## 5              max precision  0.982665 1.000000   0
## 6           max absolute_MCC  0.602110 0.594418 192
## 7 max min_per_class_accuracy  0.667235 0.794118 176
## 
## 
## 
## [[3]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_27 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 511,502 weights/biases, 3.9 MB, 16,140 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  5.00 %                                     
## 2     2   500 TanhDropout 50.00 % 0.000010 0.000010  0.019968 0.000000
## 3     3   500 TanhDropout 50.00 % 0.000010 0.000010  0.019968 0.000000
## 4     4   500 TanhDropout 50.00 % 0.000010 0.000010  0.019968 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.019968 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.500646    0.000291   0.118114 -0.001405 0.036532
## 3 0.500646   -0.000138   0.045313 -0.000065 0.015720
## 4 0.500646   -0.000149   0.044904  0.000042 0.007013
## 5 0.500646   -0.003295   0.104159  0.000000 0.098103
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1543109
## R^2:  0.377939
## LogLoss:  0.4825531
## AUC:  0.862211
## Gini:  0.7244219
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    313 143 0.313596   =143/456
## UP       71 473 0.130515    =71/544
## Totals  384 616 0.214000  =214/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.343151 0.815517 252
## 2                     max f2  0.097415 0.885240 342
## 3               max f0point5  0.726334 0.823627 148
## 4               max accuracy  0.555973 0.790000 198
## 5              max precision  0.980575 1.000000   0
## 6           max absolute_MCC  0.649313 0.579397 172
## 7 max min_per_class_accuracy  0.591604 0.787281 189
## 
## 
## 
## [[4]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_8 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 511,502 weights/biases, 3.9 MB, 25,694 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  0.00 %                                     
## 2     2   500 TanhDropout 50.00 % 0.000010 0.000010  0.009997 0.000000
## 3     3   500 TanhDropout 50.00 % 0.000010 0.000010  0.009997 0.000000
## 4     4   500 TanhDropout 50.00 % 0.000010 0.000010  0.009997 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.009997 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.501028   -0.000144   0.112752  0.000013 0.027207
## 3 0.501028   -0.000118   0.045289  0.000332 0.010508
## 4 0.501028   -0.000148   0.044927 -0.000007 0.005385
## 5 0.501028   -0.003294   0.103339 -0.009607 0.129551
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1440823
## R^2:  0.419173
## LogLoss:  0.4494237
## AUC:  0.8708882
## Gini:  0.7417763
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    343 113 0.247807   =113/456
## UP       84 460 0.154412    =84/544
## Totals  427 573 0.197000  =197/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.507066 0.823635 211
## 2                     max f2  0.185955 0.891393 316
## 3               max f0point5  0.728470 0.833333 136
## 4               max accuracy  0.507066 0.803000 211
## 5              max precision  0.970150 1.000000   0
## 6           max absolute_MCC  0.507066 0.601912 211
## 7 max min_per_class_accuracy  0.628000 0.796053 179
## 
## 
## 
## [[5]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_15 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 27,452 weights/biases, 223.1 KB, 60,000 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  5.00 %                                     
## 2     2   100 TanhDropout 50.00 % 0.000010 0.000010  0.019988 0.000000
## 3     3   100 TanhDropout 50.00 % 0.000010 0.000010  0.019988 0.000000
## 4     4   150 TanhDropout 50.00 % 0.000010 0.000010  0.019988 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.019988 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.502400   -0.019309   0.328031  0.002037 0.117620
## 3 0.502400    0.000351   0.109422 -0.009580 0.049107
## 4 0.502400    0.000323   0.092832 -0.000041 0.019998
## 5 0.502400    0.014493   0.195558  0.000000 0.194651
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1802678
## R^2:  0.2733014
## LogLoss:  0.5779025
## AUC:  0.8448465
## Gini:  0.689693
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    316 140 0.307018   =140/456
## UP       85 459 0.156250    =85/544
## Totals  401 599 0.225000  =225/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.464885 0.803150 221
## 2                     max f2  0.089900 0.882063 379
## 3               max f0point5  0.929821 0.802469  84
## 4               max accuracy  0.811956 0.775000 150
## 5              max precision  0.961231 1.000000   0
## 6           max absolute_MCC  0.811956 0.548659 150
## 7 max min_per_class_accuracy  0.811956 0.773897 150
## 
## 
## 
## [[6]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_43 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 511,502 weights/biases, 3.9 MB, 32,604 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  5.00 %                                     
## 2     2   500 TanhDropout 50.00 % 0.000010 0.000010  0.019369 0.000000
## 3     3   500 TanhDropout 50.00 % 0.000010 0.000010  0.019369 0.000000
## 4     4   500 TanhDropout 50.00 % 0.000010 0.000010  0.019369 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.019369 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.501304   -0.000839   0.129777  0.003506 0.040153
## 3 0.501304   -0.000119   0.044447  0.000090 0.016989
## 4 0.501304   -0.000148   0.043777  0.000385 0.009778
## 5 0.501304   -0.003249   0.103781  0.000330 0.195060
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1570646
## R^2:  0.3668384
## LogLoss:  0.5021792
## AUC:  0.8647647
## Gini:  0.7295295
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    307 149 0.326754   =149/456
## UP       61 483 0.112132    =61/544
## Totals  368 632 0.210000  =210/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.356498 0.821429 244
## 2                     max f2  0.095936 0.889716 336
## 3               max f0point5  0.883208 0.822011 100
## 4               max accuracy  0.665637 0.794000 175
## 5              max precision  0.990196 1.000000   0
## 6           max absolute_MCC  0.665637 0.584937 175
## 7 max min_per_class_accuracy  0.715980 0.788603 161
## 
## 
## 
## [[7]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_33 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 44,402 weights/biases, 355.3 KB, 38,048 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  5.00 %                                     
## 2     2   200 TanhDropout 50.00 % 0.000010 0.000010  0.009633 0.000000
## 3     3   200 TanhDropout 50.00 % 0.000010 0.000010  0.009633 0.000000
## 4     4     2     Softmax         0.000010 0.000010  0.009633 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.501522   -0.000015   0.111337 -0.001882 0.014071
## 3 0.501522    0.000319   0.069381  0.000280 0.010010
## 4 0.501522    0.007925   0.113197  0.000000 0.104869
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1638547
## R^2:  0.3394662
## LogLoss:  0.5195897
## AUC:  0.8535398
## Gini:  0.7070796
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    313 143 0.313596   =143/456
## UP       78 466 0.143382    =78/544
## Totals  391 609 0.221000  =221/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.476420 0.808326 228
## 2                     max f2  0.097218 0.883148 337
## 3               max f0point5  0.775384 0.816285 139
## 4               max accuracy  0.775384 0.779000 139
## 5              max precision  0.996357 1.000000   0
## 6           max absolute_MCC  0.775384 0.565297 139
## 7 max min_per_class_accuracy  0.689625 0.776316 170
## 
## 
## 
## [[8]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_38 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 62,502 weights/biases, 498.1 KB, 35,198 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  0.00 %                                     
## 2     2   100 TanhDropout 50.00 % 0.000010 0.000010  0.019320 0.000000
## 3     3   300 TanhDropout 50.00 % 0.000010 0.000010  0.019320 0.000000
## 4     4   100 TanhDropout 50.00 % 0.000010 0.000010  0.019320 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.019320 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.501408   -0.022578   0.543596  0.015194 0.310810
## 3 0.501408    0.000545   0.081921  0.004647 0.048052
## 4 0.501408   -0.000104   0.074705 -0.000293 0.049426
## 5 0.501408    0.034776   0.221946 -0.006339 0.256919
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1601879
## R^2:  0.3542477
## LogLoss:  0.5043729
## AUC:  0.8550697
## Gini:  0.7101393
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    349 107 0.234649   =107/456
## UP       92 452 0.169118    =92/544
## Totals  441 559 0.199000  =199/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.652859 0.819583 190
## 2                     max f2  0.205508 0.885363 350
## 3               max f0point5  0.904901 0.826649  81
## 4               max accuracy  0.652859 0.801000 190
## 5              max precision  0.927970 1.000000   0
## 6           max absolute_MCC  0.652859 0.598099 190
## 7 max min_per_class_accuracy  0.776035 0.792279 161
## 
## 
## 
## [[9]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_12 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 27,452 weights/biases, 223.1 KB, 78,000 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  0.00 %                                     
## 2     2   100 TanhDropout 50.00 % 0.000010 0.000010  0.009992 0.000000
## 3     3   100 TanhDropout 50.00 % 0.000010 0.000010  0.009992 0.000000
## 4     4   150 TanhDropout 50.00 % 0.000010 0.000010  0.009992 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.009992 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.503120   -0.010517   0.592971  0.013215 0.334167
## 3 0.503120    0.000421   0.144646  0.008034 0.063836
## 4 0.503120    0.000035   0.093813  0.000042 0.030616
## 5 0.503120    0.014526   0.193559  0.001651 0.305373
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1647713
## R^2:  0.3357711
## LogLoss:  0.5396234
## AUC:  0.8626866
## Gini:  0.7253733
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    320 136 0.298246   =136/456
## UP       68 476 0.125000    =68/544
## Totals  388 612 0.204000  =204/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.251860 0.823529 258
## 2                     max f2  0.115894 0.885860 369
## 3               max f0point5  0.934725 0.825390  97
## 4               max accuracy  0.430301 0.798000 231
## 5              max precision  0.966775 0.936170   1
## 6           max absolute_MCC  0.430301 0.591892 231
## 7 max min_per_class_accuracy  0.755453 0.787281 174
## 
## 
## 
## [[10]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_22 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 62,502 weights/biases, 498.1 KB, 27,277 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  0.00 %                                     
## 2     2   100 TanhDropout 50.00 % 0.000010 0.000010  0.019946 0.000000
## 3     3   300 TanhDropout 50.00 % 0.000010 0.000010  0.019946 0.000000
## 4     4   100 TanhDropout 50.00 % 0.000010 0.000010  0.019946 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.019946 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.501091   -0.012316   0.421154 -0.010069 0.154999
## 3 0.501091    0.000306   0.074092  0.003211 0.032471
## 4 0.501091   -0.000050   0.070647 -0.002242 0.037002
## 5 0.501091    0.034893   0.222972 -0.000000 0.182359
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1591106
## R^2:  0.3585906
## LogLoss:  0.5046268
## AUC:  0.8624206
## Gini:  0.7248412
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    324 132 0.289474   =132/456
## UP       77 467 0.141544    =77/544
## Totals  401 599 0.209000  =209/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.466481 0.817148 221
## 2                     max f2  0.159670 0.883063 316
## 3               max f0point5  0.882114 0.821918 102
## 4               max accuracy  0.673500 0.797000 179
## 5              max precision  0.958677 1.000000   0
## 6           max absolute_MCC  0.673500 0.590275 179
## 7 max min_per_class_accuracy  0.757359 0.789474 158
## 
## 
## 
## [[11]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_18 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 44,402 weights/biases, 355.3 KB, 103,180 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  0.00 %                                     
## 2     2   200 TanhDropout 50.00 % 0.000010 0.000010  0.019796 0.000000
## 3     3   200 TanhDropout 50.00 % 0.000010 0.000010  0.019796 0.000000
## 4     4     2     Softmax         0.000010 0.000010  0.019796 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.504127   -0.007860   0.459964  0.008145 0.412866
## 3 0.504127    0.000041   0.086202  0.012243 0.101060
## 4 0.504127    0.013444   0.167717  0.005278 0.233918
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1506486
## R^2:  0.3927025
## LogLoss:  0.4955137
## AUC:  0.8841327
## Gini:  0.7682654
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    332 124 0.271930   =124/456
## UP       73 471 0.134191    =73/544
## Totals  405 595 0.197000  =197/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.481713 0.827041 205
## 2                     max f2  0.065697 0.887803 342
## 3               max f0point5  0.895947 0.854922  96
## 4               max accuracy  0.882916 0.808000 102
## 5              max precision  0.995050 1.000000   0
## 6           max absolute_MCC  0.891412 0.628681  99
## 7 max min_per_class_accuracy  0.722960 0.795956 148
## 
## 
## 
## [[12]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_11 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 511,502 weights/biases, 3.9 MB, 25,354 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  5.00 %                                     
## 2     2   500 TanhDropout 50.00 % 0.000010 0.000010  0.019995 0.000000
## 3     3   500 TanhDropout 50.00 % 0.000010 0.000010  0.019995 0.000000
## 4     4   500 TanhDropout 50.00 % 0.000010 0.000010  0.019995 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.019995 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.501014    0.000197   0.127242  0.000676 0.038729
## 3 0.501014   -0.000126   0.044810  0.000589 0.019292
## 4 0.501014   -0.000149   0.044206 -0.000485 0.009341
## 5 0.501014   -0.003272   0.103404  0.004568 0.120445
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1498031
## R^2:  0.396111
## LogLoss:  0.4653506
## AUC:  0.8629608
## Gini:  0.7259215
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    313 143 0.313596   =143/456
## UP       69 475 0.126838    =69/544
## Totals  382 618 0.212000  =212/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.431493 0.817556 236
## 2                     max f2  0.159043 0.885734 331
## 3               max f0point5  0.681070 0.821372 158
## 4               max accuracy  0.647639 0.795000 169
## 5              max precision  0.964970 1.000000   0
## 6           max absolute_MCC  0.647639 0.589913 169
## 7 max min_per_class_accuracy  0.632264 0.794118 176
## 
## 
## 
## [[13]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_46 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 27,452 weights/biases, 223.1 KB, 54,000 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  0.00 %                                     
## 2     2   100 TanhDropout 50.00 % 0.000010 0.000010  0.018975 0.000000
## 3     3   100 TanhDropout 50.00 % 0.000010 0.000010  0.018975 0.000000
## 4     4   150 TanhDropout 50.00 % 0.000010 0.000010  0.018975 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.018975 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.502160   -0.017688   0.731784  0.063544 0.464064
## 3 0.502160   -0.000063   0.191040 -0.006833 0.124393
## 4 0.502160    0.000322   0.096922 -0.007678 0.055996
## 5 0.502160    0.006844   0.162088  0.009850 0.185192
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1725589
## R^2:  0.3043773
## LogLoss:  0.5528586
## AUC:  0.8322892
## Gini:  0.6645785
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    320 136 0.298246   =136/456
## UP       77 467 0.141544    =77/544
## Totals  397 603 0.213000  =213/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.329443 0.814298 246
## 2                     max f2  0.116535 0.878057 367
## 3               max f0point5  0.860166 0.805085 156
## 4               max accuracy  0.657481 0.789000 198
## 5              max precision  0.939729 0.868805  42
## 6           max absolute_MCC  0.657481 0.574017 198
## 7 max min_per_class_accuracy  0.821158 0.778509 168
## 
## 
## 
## [[14]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_4 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 62,502 weights/biases, 498.1 KB, 42,705 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  0.00 %                                     
## 2     2   100 TanhDropout 50.00 % 0.000010 0.000010  0.009996 0.000000
## 3     3   300 TanhDropout 50.00 % 0.000010 0.000010  0.009996 0.000000
## 4     4   100 TanhDropout 50.00 % 0.000010 0.000010  0.009996 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.009996 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.501708   -0.007007   0.489841  0.022898 0.271116
## 3 0.501708    0.000464   0.077390  0.002201 0.030772
## 4 0.501708   -0.000072   0.070950 -0.000540 0.030440
## 5 0.501708    0.034765   0.219779 -0.008417 0.217558
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1517823
## R^2:  0.3881324
## LogLoss:  0.4864908
## AUC:  0.8777493
## Gini:  0.7554986
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    344 112 0.245614   =112/456
## UP       81 463 0.148897    =81/544
## Totals  425 575 0.193000  =193/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.475785 0.827525 220
## 2                     max f2  0.116159 0.887236 332
## 3               max f0point5  0.889513 0.845218 121
## 4               max accuracy  0.837014 0.809000 146
## 5              max precision  0.963971 1.000000   0
## 6           max absolute_MCC  0.837014 0.622098 146
## 7 max min_per_class_accuracy  0.726068 0.799632 178
## 
## 
## 
## [[15]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_16 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 44,402 weights/biases, 355.3 KB, 95,536 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  0.00 %                                     
## 2     2   200 TanhDropout 50.00 % 0.000010 0.000010  0.009905 0.000000
## 3     3   200 TanhDropout 50.00 % 0.000010 0.000010  0.009905 0.000000
## 4     4     2     Softmax         0.000010 0.000010  0.009905 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.503821   -0.004590   0.232665  0.013103 0.154397
## 3 0.503821    0.000355   0.068312  0.000497 0.035599
## 4 0.503821    0.013785   0.169529 -0.002265 0.322088
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1467153
## R^2:  0.4085587
## LogLoss:  0.4642349
## AUC:  0.8779952
## Gini:  0.7559904
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    336 120 0.263158   =120/456
## UP       77 467 0.141544    =77/544
## Totals  413 587 0.197000  =197/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.507021 0.825818 214
## 2                     max f2  0.140230 0.887152 321
## 3               max f0point5  0.830500 0.843271 120
## 4               max accuracy  0.783344 0.804000 138
## 5              max precision  0.990602 1.000000   0
## 6           max absolute_MCC  0.783344 0.613946 138
## 7 max min_per_class_accuracy  0.678521 0.798246 170
## 
## 
## 
## [[16]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_25 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 511,502 weights/biases, 3.9 MB, 25,056 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  5.00 %                                     
## 2     2   500 TanhDropout 50.00 % 0.000010 0.000010  0.009975 0.000000
## 3     3   500 TanhDropout 50.00 % 0.000010 0.000010  0.009975 0.000000
## 4     4   500 TanhDropout 50.00 % 0.000010 0.000010  0.009975 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.009975 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.501002    0.000301   0.093910 -0.000755 0.015486
## 3 0.501002   -0.000119   0.046047 -0.000359 0.007668
## 4 0.501002   -0.000152   0.045777 -0.000446 0.006531
## 5 0.501002   -0.002082   0.065352 -0.000000 0.124258
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.153262
## R^2:  0.3821674
## LogLoss:  0.4728607
## AUC:  0.8593629
## Gini:  0.7187258
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    300 156 0.342105   =156/456
## UP       61 483 0.112132    =61/544
## Totals  361 639 0.217000  =217/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.426005 0.816568 241
## 2                     max f2  0.145877 0.885762 347
## 3               max f0point5  0.781258 0.814261 126
## 4               max accuracy  0.613774 0.788000 187
## 5              max precision  0.970464 1.000000   0
## 6           max absolute_MCC  0.613774 0.572406 187
## 7 max min_per_class_accuracy  0.669882 0.784926 172
## 
## 
## 
## [[17]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_23 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 62,502 weights/biases, 498.1 KB, 43,565 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  5.00 %                                     
## 2     2   100 TanhDropout 50.00 % 0.000010 0.000010  0.019913 0.000000
## 3     3   300 TanhDropout 50.00 % 0.000010 0.000010  0.019913 0.000000
## 4     4   100 TanhDropout 50.00 % 0.000010 0.000010  0.019913 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.019913 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.501743    0.026653   0.486142  0.003011 0.247115
## 3 0.501743    0.000514   0.076556  0.001091 0.046980
## 4 0.501743    0.000073   0.071558  0.005225 0.050659
## 5 0.501743    0.034771   0.222714  0.004242 0.230683
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.173379
## R^2:  0.3010714
## LogLoss:  0.5766516
## AUC:  0.8568353
## Gini:  0.7136707
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    326 130 0.285088   =130/456
## UP       80 464 0.147059    =80/544
## Totals  406 594 0.210000  =210/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.398586 0.815466 222
## 2                     max f2  0.080227 0.882453 352
## 3               max f0point5  0.894103 0.826104 131
## 4               max accuracy  0.894103 0.796000 131
## 5              max precision  0.973969 0.938462   3
## 6           max absolute_MCC  0.894103 0.594664 131
## 7 max min_per_class_accuracy  0.833823 0.779412 154
## 
## 
## 
## [[18]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_1 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 44,402 weights/biases, 355.3 KB, 60,030 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  5.00 %                                     
## 2     2   200 TanhDropout 50.00 % 0.000010 0.000010  0.009994 0.000000
## 3     3   200 TanhDropout 50.00 % 0.000010 0.000010  0.009994 0.000000
## 4     4     2     Softmax         0.000010 0.000010  0.009994 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.502401   -0.002312   0.186890  0.000629 0.101840
## 3 0.502401    0.000339   0.067567  0.000232 0.026308
## 4 0.502401    0.013836   0.170846  0.002280 0.309615
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1576219
## R^2:  0.3645917
## LogLoss:  0.5000199
## AUC:  0.8668126
## Gini:  0.7336252
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    315 141 0.309211   =141/456
## UP       72 472 0.132353    =72/544
## Totals  387 613 0.213000  =213/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.460468 0.815903 220
## 2                     max f2  0.080929 0.884603 351
## 3               max f0point5  0.878813 0.831105  99
## 4               max accuracy  0.723686 0.797000 150
## 5              max precision  0.991862 1.000000   0
## 6           max absolute_MCC  0.723686 0.593385 150
## 7 max min_per_class_accuracy  0.707524 0.794118 154
## 
## 
## 
## [[19]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_9 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 511,502 weights/biases, 3.9 MB, 16,805 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  5.00 %                                     
## 2     2   500 TanhDropout 50.00 % 0.000010 0.000010  0.009998 0.000000
## 3     3   500 TanhDropout 50.00 % 0.000010 0.000010  0.009998 0.000000
## 4     4   500 TanhDropout 50.00 % 0.000010 0.000010  0.009998 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.009998 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.500672    0.000064   0.094250 -0.001274 0.018597
## 3 0.500672   -0.000141   0.045908  0.000413 0.009603
## 4 0.500672   -0.000157   0.045760 -0.000192 0.008038
## 5 0.500672   -0.003330   0.105658 -0.000025 0.119857
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1604394
## R^2:  0.353234
## LogLoss:  0.4979507
## AUC:  0.8587038
## Gini:  0.7174076
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    286 170 0.372807   =170/456
## UP       58 486 0.106618    =58/544
## Totals  344 656 0.228000  =228/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.351291 0.810000 259
## 2                     max f2  0.103772 0.884218 344
## 3               max f0point5  0.768000 0.821429 145
## 4               max accuracy  0.761827 0.791000 148
## 5              max precision  0.987989 1.000000   0
## 6           max absolute_MCC  0.761827 0.584091 148
## 7 max min_per_class_accuracy  0.705000 0.787281 167
## 
## 
## 
## [[20]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_36 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 62,502 weights/biases, 498.1 KB, 35,198 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  0.00 %                                     
## 2     2   100 TanhDropout 50.00 % 0.000010 0.000010  0.009660 0.000000
## 3     3   300 TanhDropout 50.00 % 0.000010 0.000010  0.009660 0.000000
## 4     4   100 TanhDropout 50.00 % 0.000010 0.000010  0.009660 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.009660 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.501408   -0.002104   0.437015 -0.002230 0.229438
## 3 0.501408    0.000507   0.075321  0.000225 0.024598
## 4 0.501408    0.000120   0.070200 -0.003835 0.026060
## 5 0.501408    0.034819   0.219865 -0.000000 0.249070
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1509604
## R^2:  0.3914457
## LogLoss:  0.478935
## AUC:  0.8743429
## Gini:  0.7486858
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    336 120 0.263158   =120/456
## UP       76 468 0.139706    =76/544
## Totals  412 588 0.196000  =196/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.408951 0.826855 237
## 2                     max f2  0.133417 0.886986 344
## 3               max f0point5  0.872993 0.842483 122
## 4               max accuracy  0.408951 0.804000 237
## 5              max precision  0.961425 1.000000   0
## 6           max absolute_MCC  0.841574 0.610831 138
## 7 max min_per_class_accuracy  0.670065 0.798246 181
## 
## 
## 
## [[21]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_39 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 62,502 weights/biases, 498.1 KB, 35,198 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  5.00 %                                     
## 2     2   100 TanhDropout 50.00 % 0.000010 0.000010  0.019320 0.000000
## 3     3   300 TanhDropout 50.00 % 0.000010 0.000010  0.019320 0.000000
## 4     4   100 TanhDropout 50.00 % 0.000010 0.000010  0.019320 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.019320 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.501408   -0.001232   0.499727  0.013557 0.242467
## 3 0.501408    0.000697   0.078178  0.000707 0.042336
## 4 0.501408    0.000221   0.072987  0.005057 0.045531
## 5 0.501408    0.025498   0.194713 -0.007165 0.210428
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1703277
## R^2:  0.3133718
## LogLoss:  0.5386076
## AUC:  0.8512985
## Gini:  0.7025969
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    340 116 0.254386   =116/456
## UP       96 448 0.176471    =96/544
## Totals  436 564 0.212000  =212/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.619251 0.808664 198
## 2                     max f2  0.134722 0.882353 346
## 3               max f0point5  0.909962 0.816413 104
## 4               max accuracy  0.646314 0.788000 196
## 5              max precision  0.945883 1.000000   0
## 6           max absolute_MCC  0.646314 0.571701 196
## 7 max min_per_class_accuracy  0.786800 0.780702 162
## 
## 
## 
## [[22]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_44 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 27,452 weights/biases, 223.1 KB, 78,000 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  0.00 %                                     
## 2     2   100 TanhDropout 50.00 % 0.000010 0.000010  0.009276 0.000000
## 3     3   100 TanhDropout 50.00 % 0.000010 0.000010  0.009276 0.000000
## 4     4   150 TanhDropout 50.00 % 0.000010 0.000010  0.009276 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.009276 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.503120   -0.014628   0.445644  0.023992 0.180828
## 3 0.503120    0.000110   0.122202 -0.002477 0.060277
## 4 0.503120    0.000320   0.093474 -0.003678 0.024477
## 5 0.503120    0.014479   0.195138  0.000524 0.238568
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.168018
## R^2:  0.3226827
## LogLoss:  0.5481999
## AUC:  0.8584922
## Gini:  0.7169843
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    324 132 0.289474   =132/456
## UP       76 468 0.139706    =76/544
## Totals  400 600 0.208000  =208/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.331712 0.818182 225
## 2                     max f2  0.104276 0.881757 357
## 3               max f0point5  0.928712 0.829187 101
## 4               max accuracy  0.786646 0.794000 159
## 5              max precision  0.964956 1.000000   0
## 6           max absolute_MCC  0.928712 0.587362 101
## 7 max min_per_class_accuracy  0.821594 0.787281 154
## 
## 
## 
## [[23]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_45 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 27,452 weights/biases, 223.1 KB, 78,000 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  5.00 %                                     
## 2     2   100 TanhDropout 50.00 % 0.000010 0.000010  0.009276 0.000000
## 3     3   100 TanhDropout 50.00 % 0.000010 0.000010  0.009276 0.000000
## 4     4   150 TanhDropout 50.00 % 0.000010 0.000010  0.009276 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.009276 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.503120   -0.046467   0.592000 -0.001315 0.321958
## 3 0.503120   -0.001237   0.148049  0.003970 0.070714
## 4 0.503120    0.000267   0.093688 -0.003360 0.028881
## 5 0.503120    0.014472   0.192572  0.004852 0.226244
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1697848
## R^2:  0.3155603
## LogLoss:  0.5463388
## AUC:  0.8559666
## Gini:  0.7119332
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    309 147 0.322368   =147/456
## UP       67 477 0.123162    =67/544
## Totals  376 624 0.214000  =214/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.210996 0.816781 278
## 2                     max f2  0.123385 0.883642 390
## 3               max f0point5  0.923767 0.817953 115
## 4               max accuracy  0.737426 0.790000 193
## 5              max precision  0.949928 0.920000   1
## 6           max absolute_MCC  0.737426 0.577214 193
## 7 max min_per_class_accuracy  0.797093 0.785088 181
## 
## 
## 
## [[24]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_31 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 27,452 weights/biases, 223.1 KB, 60,000 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  5.00 %                                     
## 2     2   100 TanhDropout 50.00 % 0.000010 0.000010  0.019881 0.000000
## 3     3   100 TanhDropout 50.00 % 0.000010 0.000010  0.019881 0.000000
## 4     4   150 TanhDropout 50.00 % 0.000010 0.000010  0.019881 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.019881 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.502400   -0.008616   0.335211  0.005017 0.131337
## 3 0.502400    0.000702   0.110490 -0.005770 0.056900
## 4 0.502400    0.000821   0.092804 -0.007438 0.019982
## 5 0.502400    0.006611   0.162674 -0.000570 0.095912
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1793299
## R^2:  0.2770823
## LogLoss:  0.5714629
## AUC:  0.841809
## Gini:  0.6836179
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    297 159 0.348684   =159/456
## UP       71 473 0.130515    =71/544
## Totals  368 632 0.230000  =230/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.343142 0.804422 252
## 2                     max f2  0.084971 0.879959 355
## 3               max f0point5  0.921189 0.798923  79
## 4               max accuracy  0.629063 0.776000 194
## 5              max precision  0.957669 1.000000   0
## 6           max absolute_MCC  0.629063 0.547131 194
## 7 max min_per_class_accuracy  0.781244 0.763158 149
## 
## 
## 
## [[25]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_32 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 44,402 weights/biases, 355.3 KB, 68,418 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  0.00 %                                     
## 2     2   200 TanhDropout 50.00 % 0.000010 0.000010  0.009360 0.000000
## 3     3   200 TanhDropout 50.00 % 0.000010 0.000010  0.009360 0.000000
## 4     4     2     Softmax         0.000010 0.000010  0.009360 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.502737   -0.000533   0.193340  0.006494 0.096838
## 3 0.502737    0.000322   0.068723  0.000833 0.025193
## 4 0.502737    0.013890   0.171380 -0.000067 0.211800
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1497868
## R^2:  0.3961768
## LogLoss:  0.476127
## AUC:  0.8738894
## Gini:  0.7477788
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    340 116 0.254386   =116/456
## UP       82 462 0.150735    =82/544
## Totals  422 578 0.198000  =198/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.500601 0.823529 213
## 2                     max f2  0.050227 0.884970 366
## 3               max f0point5  0.813734 0.844652 129
## 4               max accuracy  0.500601 0.802000 213
## 5              max precision  0.992143 1.000000   0
## 6           max absolute_MCC  0.813734 0.611340 129
## 7 max min_per_class_accuracy  0.645562 0.793860 178
## 
## 
## 
## [[26]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_42 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 511,502 weights/biases, 3.9 MB, 28,963 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  0.00 %                                     
## 2     2   500 TanhDropout 50.00 % 0.000010 0.000010  0.019437 0.000000
## 3     3   500 TanhDropout 50.00 % 0.000010 0.000010  0.019437 0.000000
## 4     4   500 TanhDropout 50.00 % 0.000010 0.000010  0.019437 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.019437 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.501159    0.000410   0.173602 -0.003464 0.087014
## 3 0.501159   -0.000122   0.043874  0.000515 0.020670
## 4 0.501159   -0.000128   0.042777  0.000075 0.009551
## 5 0.501159   -0.003210   0.102858 -0.002871 0.186586
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1475605
## R^2:  0.4051516
## LogLoss:  0.4615381
## AUC:  0.8739821
## Gini:  0.7479642
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    301 155 0.339912   =155/456
## UP       57 487 0.104779    =57/544
## Totals  358 642 0.212000  =212/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.331632 0.821248 260
## 2                     max f2  0.155194 0.883569 316
## 3               max f0point5  0.808141 0.839844 127
## 4               max accuracy  0.771416 0.798000 142
## 5              max precision  0.984528 1.000000   0
## 6           max absolute_MCC  0.771416 0.605216 142
## 7 max min_per_class_accuracy  0.655982 0.793860 178
## 
## 
## 
## [[27]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_41 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 511,502 weights/biases, 3.9 MB, 43,784 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  5.00 %                                     
## 2     2   500 TanhDropout 50.00 % 0.000010 0.000010  0.009581 0.000000
## 3     3   500 TanhDropout 50.00 % 0.000010 0.000010  0.009581 0.000000
## 4     4   500 TanhDropout 50.00 % 0.000010 0.000010  0.009581 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.009581 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.501751   -0.000865   0.112472  0.002311 0.046905
## 3 0.501751   -0.000145   0.044725  0.000356 0.012833
## 4 0.501751   -0.000146   0.044287 -0.000315 0.005286
## 5 0.501751   -0.003271   0.103187  0.001227 0.121036
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1467692
## R^2:  0.4083415
## LogLoss:  0.4583967
## AUC:  0.8675705
## Gini:  0.7351409
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    361  95 0.208333    =95/456
## UP      100 444 0.183824   =100/544
## Totals  461 539 0.195000  =195/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.571901 0.819945 199
## 2                     max f2  0.121704 0.885135 338
## 3               max f0point5  0.607935 0.831422 188
## 4               max accuracy  0.607935 0.808000 188
## 5              max precision  0.978624 1.000000   0
## 6           max absolute_MCC  0.607935 0.615618 188
## 7 max min_per_class_accuracy  0.596209 0.804825 192
## 
## 
## 
## [[28]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_10 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 511,502 weights/biases, 3.9 MB, 25,354 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  0.00 %                                     
## 2     2   500 TanhDropout 50.00 % 0.000010 0.000010  0.019995 0.000000
## 3     3   500 TanhDropout 50.00 % 0.000010 0.000010  0.019995 0.000000
## 4     4   500 TanhDropout 50.00 % 0.000010 0.000010  0.019995 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.019995 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.501014    0.001021   0.159039 -0.001734 0.058300
## 3 0.501014   -0.000139   0.044215 -0.000231 0.023359
## 4 0.501014   -0.000164   0.043209  0.000679 0.011522
## 5 0.501014   -0.002221   0.071740  0.001223 0.204515
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1595327
## R^2:  0.356889
## LogLoss:  0.5579547
## AUC:  0.8710333
## Gini:  0.7420666
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    323 133 0.291667   =133/456
## UP       69 475 0.126838    =69/544
## Totals  392 608 0.202000  =202/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.359766 0.824653 239
## 2                     max f2  0.047279 0.888889 342
## 3               max f0point5  0.930767 0.833333  88
## 4               max accuracy  0.496429 0.800000 211
## 5              max precision  0.997995 1.000000   0
## 6           max absolute_MCC  0.489367 0.595832 214
## 7 max min_per_class_accuracy  0.703153 0.793860 170
## 
## 
## 
## [[29]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_34 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 44,402 weights/biases, 355.3 KB, 60,813 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  0.00 %                                     
## 2     2   200 TanhDropout 50.00 % 0.000010 0.000010  0.018853 0.000000
## 3     3   200 TanhDropout 50.00 % 0.000010 0.000010  0.018853 0.000000
## 4     4     2     Softmax         0.000010 0.000010  0.018853 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.502432   -0.000778   0.342567 -0.004697 0.257620
## 3 0.502432    0.000247   0.071081  0.000441 0.057294
## 4 0.502432    0.013712   0.171797 -0.018204 0.194183
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1580248
## R^2:  0.3629676
## LogLoss:  0.51412
## AUC:  0.8661293
## Gini:  0.7322586
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    302 154 0.337719   =154/456
## UP       62 482 0.113971    =62/544
## Totals  364 636 0.216000  =216/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.272495 0.816949 260
## 2                     max f2  0.038328 0.880039 373
## 3               max f0point5  0.885274 0.832585 104
## 4               max accuracy  0.727332 0.798000 158
## 5              max precision  0.987921 1.000000   0
## 6           max absolute_MCC  0.727332 0.597634 158
## 7 max min_per_class_accuracy  0.653053 0.789474 174
## 
## 
## 
## [[30]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_47 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 27,452 weights/biases, 223.1 KB, 60,000 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  5.00 %                                     
## 2     2   100 TanhDropout 50.00 % 0.000010 0.000010  0.018868 0.000000
## 3     3   100 TanhDropout 50.00 % 0.000010 0.000010  0.018868 0.000000
## 4     4   150 TanhDropout 50.00 % 0.000010 0.000010  0.018868 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.018868 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.502400   -0.022736   0.342429  0.010774 0.102840
## 3 0.502400    0.001092   0.110536  0.008491 0.041514
## 4 0.502400    0.000663   0.092985 -0.001013 0.019302
## 5 0.502400    0.008202   0.119529  0.000000 0.115061
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1829986
## R^2:  0.2622928
## LogLoss:  0.5976631
## AUC:  0.8444615
## Gini:  0.688923
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    307 149 0.326754   =149/456
## UP       77 467 0.141544    =77/544
## Totals  384 616 0.226000  =226/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.275746 0.805172 250
## 2                     max f2  0.059586 0.880786 377
## 3               max f0point5  0.927939 0.802336  99
## 4               max accuracy  0.759831 0.779000 174
## 5              max precision  0.964679 1.000000   0
## 6           max absolute_MCC  0.759831 0.555351 174
## 7 max min_per_class_accuracy  0.804035 0.770221 164
## 
## 
## 
## [[31]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_28 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 27,452 weights/biases, 223.1 KB, 78,000 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  0.00 %                                     
## 2     2   100 TanhDropout 50.00 % 0.000010 0.000010  0.009923 0.000000
## 3     3   100 TanhDropout 50.00 % 0.000010 0.000010  0.009923 0.000000
## 4     4   150 TanhDropout 50.00 % 0.000010 0.000010  0.009923 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.009923 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.503120   -0.020678   0.433392  0.008388 0.154900
## 3 0.503120    0.000662   0.119402 -0.008209 0.051403
## 4 0.503120    0.000512   0.093283 -0.001823 0.025512
## 5 0.503120    0.014529   0.194474 -0.005560 0.235439
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.166087
## R^2:  0.3304671
## LogLoss:  0.540576
## AUC:  0.8637469
## Gini:  0.7274937
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    326 130 0.285088   =130/456
## UP       75 469 0.137868    =75/544
## Totals  401 599 0.205000  =205/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.415609 0.820647 216
## 2                     max f2  0.106770 0.885773 339
## 3               max f0point5  0.895627 0.830632 114
## 4               max accuracy  0.895627 0.795000 114
## 5              max precision  0.969429 1.000000   0
## 6           max absolute_MCC  0.895627 0.596085 114
## 7 max min_per_class_accuracy  0.821074 0.788603 147
## 
## 
## 
## [[32]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_2 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 44,402 weights/biases, 355.3 KB, 37,506 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  0.00 %                                     
## 2     2   200 TanhDropout 50.00 % 0.000010 0.000010  0.019993 0.000000
## 3     3   200 TanhDropout 50.00 % 0.000010 0.000010  0.019993 0.000000
## 4     4     2     Softmax         0.000010 0.000010  0.019993 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.501500   -0.004589   0.222594 -0.003893 0.101513
## 3 0.501500    0.000425   0.068504  0.000947 0.027207
## 4 0.501500    0.013807   0.174442 -0.000000 0.262489
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1581111
## R^2:  0.3626198
## LogLoss:  0.4972461
## AUC:  0.8621122
## Gini:  0.7242244
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    300 156 0.342105   =156/456
## UP       61 483 0.112132    =61/544
## Totals  361 639 0.217000  =217/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.383185 0.816568 246
## 2                     max f2  0.095099 0.879121 351
## 3               max f0point5  0.823422 0.827703 122
## 4               max accuracy  0.779221 0.788000 140
## 5              max precision  0.984935 1.000000   0
## 6           max absolute_MCC  0.823422 0.580237 122
## 7 max min_per_class_accuracy  0.686865 0.781250 163
## 
## 
## 
## [[33]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_7 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 62,502 weights/biases, 498.1 KB, 35,449 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  5.00 %                                     
## 2     2   100 TanhDropout 50.00 % 0.000010 0.000010  0.019993 0.000000
## 3     3   300 TanhDropout 50.00 % 0.000010 0.000010  0.019993 0.000000
## 4     4   100 TanhDropout 50.00 % 0.000010 0.000010  0.019993 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.019993 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.501418   -0.014674   0.664914  0.116600 0.477172
## 3 0.501418    0.000451   0.094001  0.004636 0.062352
## 4 0.501418    0.000360   0.082317 -0.000567 0.079174
## 5 0.501418    0.019655   0.190907 -0.007378 0.205828
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1624834
## R^2:  0.3449942
## LogLoss:  0.5075857
## AUC:  0.8457999
## Gini:  0.6915997
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    315 141 0.309211   =141/456
## UP       70 474 0.128676    =70/544
## Totals  385 615 0.211000  =211/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.296537 0.817947 266
## 2                     max f2  0.185794 0.883157 361
## 3               max f0point5  0.847823 0.817776 137
## 4               max accuracy  0.675696 0.791000 185
## 5              max precision  0.914198 0.900000  11
## 6           max absolute_MCC  0.675696 0.579700 185
## 7 max min_per_class_accuracy  0.704114 0.785088 178
## 
## 
## 
## [[34]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_29 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 27,452 weights/biases, 223.1 KB, 78,000 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  5.00 %                                     
## 2     2   100 TanhDropout 50.00 % 0.000010 0.000010  0.009923 0.000000
## 3     3   100 TanhDropout 50.00 % 0.000010 0.000010  0.009923 0.000000
## 4     4   150 TanhDropout 50.00 % 0.000010 0.000010  0.009923 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.009923 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.503120   -0.009091   0.417038 -0.007334 0.190441
## 3 0.503120    0.000463   0.119018  0.009932 0.058935
## 4 0.503120    0.000390   0.092972 -0.000255 0.024646
## 5 0.503120    0.014515   0.194747 -0.000187 0.199444
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1698062
## R^2:  0.3154742
## LogLoss:  0.5526462
## AUC:  0.8580769
## Gini:  0.7161539
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    319 137 0.300439   =137/456
## UP       77 467 0.141544    =77/544
## Totals  396 604 0.214000  =214/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.287574 0.813589 239
## 2                     max f2  0.098964 0.883562 352
## 3               max f0point5  0.904030 0.815747 121
## 4               max accuracy  0.538939 0.791000 198
## 5              max precision  0.961005 1.000000   0
## 6           max absolute_MCC  0.816811 0.579454 155
## 7 max min_per_class_accuracy  0.800863 0.788603 160
## 
## 
## 
## [[35]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_21 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 62,502 weights/biases, 498.1 KB, 51,698 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  5.00 %                                     
## 2     2   100 TanhDropout 50.00 % 0.000010 0.000010  0.009949 0.000000
## 3     3   300 TanhDropout 50.00 % 0.000010 0.000010  0.009949 0.000000
## 4     4   100 TanhDropout 50.00 % 0.000010 0.000010  0.009949 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.009949 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.502068    0.013893   0.375206  0.000211 0.180452
## 3 0.502068    0.000508   0.072501 -0.000549 0.021351
## 4 0.502068   -0.000073   0.069600 -0.002541 0.020502
## 5 0.502068    0.034864   0.219382 -0.001530 0.240921
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1583589
## R^2:  0.3616208
## LogLoss:  0.4996875
## AUC:  0.8658008
## Gini:  0.7316015
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    319 137 0.300439   =137/456
## UP       76 468 0.139706    =76/544
## Totals  395 605 0.213000  =213/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.332737 0.814621 249
## 2                     max f2  0.137541 0.885773 346
## 3               max f0point5  0.893742 0.827526 109
## 4               max accuracy  0.785499 0.797000 157
## 5              max precision  0.955918 1.000000   0
## 6           max absolute_MCC  0.785499 0.596138 157
## 7 max min_per_class_accuracy  0.711005 0.793860 177
## 
## 
## 
## [[36]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_3 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 44,402 weights/biases, 355.3 KB, 101,316 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  5.00 %                                     
## 2     2   200 TanhDropout 50.00 % 0.000010 0.000010  0.019980 0.000000
## 3     3   200 TanhDropout 50.00 % 0.000010 0.000010  0.019980 0.000000
## 4     4     2     Softmax         0.000010 0.000010  0.019980 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.504053   -0.001461   0.320524  0.007884 0.232922
## 3 0.504053    0.000125   0.070737 -0.001347 0.051872
## 4 0.504053    0.013509   0.166921 -0.028067 0.236425
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1580222
## R^2:  0.3629781
## LogLoss:  0.5141486
## AUC:  0.8708116
## Gini:  0.7416231
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    339 117 0.256579   =117/456
## UP       88 456 0.161765    =88/544
## Totals  427 573 0.205000  =205/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.459821 0.816473 216
## 2                     max f2  0.037232 0.886724 370
## 3               max f0point5  0.824906 0.833333 131
## 4               max accuracy  0.664951 0.796000 177
## 5              max precision  0.992127 1.000000   0
## 6           max absolute_MCC  0.824906 0.594177 131
## 7 max min_per_class_accuracy  0.664951 0.794118 177
## 
## 
## 
## [[37]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_20 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 62,502 weights/biases, 498.1 KB, 35,434 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  0.00 %                                     
## 2     2   100 TanhDropout 50.00 % 0.000010 0.000010  0.009965 0.000000
## 3     3   300 TanhDropout 50.00 % 0.000010 0.000010  0.009965 0.000000
## 4     4   100 TanhDropout 50.00 % 0.000010 0.000010  0.009965 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.009965 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.501417   -0.009239   0.329782 -0.005930 0.124079
## 3 0.501417    0.000637   0.072196  0.000935 0.021148
## 4 0.501417    0.000255   0.069747 -0.001938 0.023578
## 5 0.501417    0.034926   0.220726 -0.000000 0.235259
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.15852
## R^2:  0.3609712
## LogLoss:  0.511501
## AUC:  0.8697755
## Gini:  0.7395511
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    331 125 0.274123   =125/456
## UP       76 468 0.139706    =76/544
## Totals  407 593 0.201000  =201/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.448600 0.823219 215
## 2                     max f2  0.104005 0.885061 340
## 3               max f0point5  0.914597 0.836938  99
## 4               max accuracy  0.706395 0.799000 166
## 5              max precision  0.976149 1.000000   0
## 6           max absolute_MCC  0.885111 0.599711 117
## 7 max min_per_class_accuracy  0.752769 0.794118 155
## 
## 
## 
## [[38]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_6 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 62,502 weights/biases, 498.1 KB, 34,714 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  0.00 %                                     
## 2     2   100 TanhDropout 50.00 % 0.000010 0.000010  0.019993 0.000000
## 3     3   300 TanhDropout 50.00 % 0.000010 0.000010  0.019993 0.000000
## 4     4   100 TanhDropout 50.00 % 0.000010 0.000010  0.019993 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.019993 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.501389   -0.005960   0.413470  0.001825 0.195113
## 3 0.501389    0.000597   0.074182 -0.002700 0.031141
## 4 0.501389    0.000023   0.070659 -0.001375 0.032502
## 5 0.501389    0.034920   0.222592 -0.000000 0.298186
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1697125
## R^2:  0.315852
## LogLoss:  0.5674432
## AUC:  0.8592783
## Gini:  0.7185565
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    333 123 0.269737   =123/456
## UP       82 462 0.150735    =82/544
## Totals  415 585 0.205000  =205/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.436481 0.818423 210
## 2                     max f2  0.084577 0.880258 350
## 3               max f0point5  0.940330 0.831174  90
## 4               max accuracy  0.461980 0.795000 205
## 5              max precision  0.980934 0.937500   0
## 6           max absolute_MCC  0.436481 0.585807 210
## 7 max min_per_class_accuracy  0.782302 0.791667 155
## 
## 
## 
## [[39]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_40 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 511,502 weights/biases, 3.9 MB, 31,268 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  0.00 %                                     
## 2     2   500 TanhDropout 50.00 % 0.000010 0.000010  0.009697 0.000000
## 3     3   500 TanhDropout 50.00 % 0.000010 0.000010  0.009697 0.000000
## 4     4   500 TanhDropout 50.00 % 0.000010 0.000010  0.009697 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.009697 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.501251    0.000330   0.107060 -0.001394 0.020019
## 3 0.501251   -0.000112   0.045846  0.000618 0.008127
## 4 0.501251   -0.000153   0.045354 -0.000153 0.005183
## 5 0.501251   -0.002379   0.071743 -0.006795 0.108193
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1477379
## R^2:  0.4044363
## LogLoss:  0.4611632
## AUC:  0.8697272
## Gini:  0.7394543
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    332 124 0.271930   =124/456
## UP       77 467 0.141544    =77/544
## Totals  409 591 0.201000  =201/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.504961 0.822907 219
## 2                     max f2  0.148529 0.887262 322
## 3               max f0point5  0.816842 0.837912 106
## 4               max accuracy  0.520948 0.799000 212
## 5              max precision  0.980875 1.000000   0
## 6           max absolute_MCC  0.678347 0.595264 167
## 7 max min_per_class_accuracy  0.663442 0.795956 171
## 
## 
## 
## [[40]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_5 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 62,502 weights/biases, 498.1 KB, 50,711 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  5.00 %                                     
## 2     2   100 TanhDropout 50.00 % 0.000010 0.000010  0.009995 0.000000
## 3     3   300 TanhDropout 50.00 % 0.000010 0.000010  0.009995 0.000000
## 4     4   100 TanhDropout 50.00 % 0.000010 0.000010  0.009995 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.009995 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.502028   -0.002185   0.378752  0.016803 0.155993
## 3 0.502028    0.000528   0.073082  0.001897 0.025055
## 4 0.502028    0.000075   0.069863  0.001789 0.026297
## 5 0.502028    0.034851   0.219681 -0.008052 0.251050
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1582524
## R^2:  0.36205
## LogLoss:  0.4985361
## AUC:  0.8633276
## Gini:  0.7266552
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    328 128 0.280702   =128/456
## UP       79 465 0.145221    =79/544
## Totals  407 593 0.207000  =207/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.411887 0.817942 223
## 2                     max f2  0.144191 0.884387 376
## 3               max f0point5  0.788377 0.825581 149
## 4               max accuracy  0.777294 0.800000 151
## 5              max precision  0.948383 1.000000   0
## 6           max absolute_MCC  0.777294 0.600098 151
## 7 max min_per_class_accuracy  0.722280 0.793860 163
## 
## 
## 
## [[41]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_30 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 27,452 weights/biases, 223.1 KB, 60,000 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  0.00 %                                     
## 2     2   100 TanhDropout 50.00 % 0.000010 0.000010  0.019881 0.000000
## 3     3   100 TanhDropout 50.00 % 0.000010 0.000010  0.019881 0.000000
## 4     4   150 TanhDropout 50.00 % 0.000010 0.000010  0.019881 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.019881 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.502400    0.012785   0.706004  0.028165 0.419705
## 3 0.502400   -0.002535   0.184443  0.013560 0.125341
## 4 0.502400    0.000560   0.097959 -0.008628 0.053093
## 5 0.502400    0.014449   0.195677  0.000000 0.179751
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1781256
## R^2:  0.281937
## LogLoss:  0.6019819
## AUC:  0.8437319
## Gini:  0.6874637
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    310 146 0.320175   =146/456
## UP       70 474 0.128676    =70/544
## Totals  380 620 0.216000  =216/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.292745 0.814433 260
## 2                     max f2  0.062142 0.884034 385
## 3               max f0point5  0.833937 0.806452 157
## 4               max accuracy  0.483232 0.788000 222
## 5              max precision  0.970005 1.000000   0
## 6           max absolute_MCC  0.483232 0.571493 222
## 7 max min_per_class_accuracy  0.805569 0.780702 167
## 
## 
## 
## [[42]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_0 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 44,402 weights/biases, 355.3 KB, 71,314 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  0.00 %                                     
## 2     2   200 TanhDropout 50.00 % 0.000010 0.000010  0.009993 0.000000
## 3     3   200 TanhDropout 50.00 % 0.000010 0.000010  0.009993 0.000000
## 4     4     2     Softmax         0.000010 0.000010  0.009993 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.502853   -0.006274   0.226727  0.006884 0.139376
## 3 0.502853    0.000312   0.068081 -0.000401 0.031713
## 4 0.502853    0.013771   0.170350 -0.005806 0.224348
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1478449
## R^2:  0.4040051
## LogLoss:  0.4683478
## AUC:  0.8794021
## Gini:  0.7588042
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    315 141 0.309211   =141/456
## UP       65 479 0.119485    =65/544
## Totals  380 620 0.206000  =206/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.389473 0.823024 246
## 2                     max f2  0.095350 0.890248 338
## 3               max f0point5  0.822990 0.849057 129
## 4               max accuracy  0.778766 0.805000 147
## 5              max precision  0.993904 1.000000   0
## 6           max absolute_MCC  0.815349 0.618286 132
## 7 max min_per_class_accuracy  0.666536 0.793860 178
## 
## 
## 
## [[43]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_17 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 44,402 weights/biases, 355.3 KB, 61,196 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  5.00 %                                     
## 2     2   200 TanhDropout 50.00 % 0.000010 0.000010  0.009939 0.000000
## 3     3   200 TanhDropout 50.00 % 0.000010 0.000010  0.009939 0.000000
## 4     4     2     Softmax         0.000010 0.000010  0.009939 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.502448   -0.001229   0.145963  0.004735 0.048078
## 3 0.502448    0.000319   0.068633  0.000415 0.015499
## 4 0.502448    0.007672   0.103916 -0.001252 0.209846
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1583713
## R^2:  0.3615708
## LogLoss:  0.4974161
## AUC:  0.8628358
## Gini:  0.7256716
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    345 111 0.243421   =111/456
## UP       97 447 0.178309    =97/544
## Totals  442 558 0.208000  =208/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.607640 0.811252 188
## 2                     max f2  0.106845 0.884836 340
## 3               max f0point5  0.804654 0.825083 128
## 4               max accuracy  0.613969 0.792000 186
## 5              max precision  0.989134 1.000000   0
## 6           max absolute_MCC  0.804654 0.580574 128
## 7 max min_per_class_accuracy  0.690629 0.787281 163
## 
## 
## 
## [[44]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_14 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 27,452 weights/biases, 223.1 KB, 60,000 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  0.00 %                                     
## 2     2   100 TanhDropout 50.00 % 0.000010 0.000010  0.019988 0.000000
## 3     3   100 TanhDropout 50.00 % 0.000010 0.000010  0.019988 0.000000
## 4     4   150 TanhDropout 50.00 % 0.000010 0.000010  0.019988 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.019988 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.502400   -0.009948   0.357332 -0.008261 0.158015
## 3 0.502400    0.000364   0.112145 -0.001479 0.060348
## 4 0.502400    0.000452   0.093119 -0.001465 0.024627
## 5 0.502400    0.014561   0.196793  0.005197 0.165826
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1822611
## R^2:  0.2652659
## LogLoss:  0.6033105
## AUC:  0.8469387
## Gini:  0.6938774
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    321 135 0.296053   =135/456
## UP       83 461 0.152574    =83/544
## Totals  404 596 0.218000  =218/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.529947 0.808772 208
## 2                     max f2  0.068873 0.881861 365
## 3               max f0point5  0.914533 0.803429 111
## 4               max accuracy  0.626679 0.782000 196
## 5              max precision  0.970312 1.000000   0
## 6           max absolute_MCC  0.529947 0.559647 208
## 7 max min_per_class_accuracy  0.847519 0.774123 142
## 
## 
## 
## [[45]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_26 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 511,502 weights/biases, 3.9 MB, 24,414 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  0.00 %                                     
## 2     2   500 TanhDropout 50.00 % 0.000010 0.000010  0.019951 0.000000
## 3     3   500 TanhDropout 50.00 % 0.000010 0.000010  0.019951 0.000000
## 4     4   500 TanhDropout 50.00 % 0.000010 0.000010  0.019951 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.019951 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.500977   -0.000888   0.137980  0.000567 0.036070
## 3 0.500977   -0.000120   0.045072 -0.000396 0.015557
## 4 0.500977   -0.000144   0.044372  0.000069 0.008838
## 5 0.500977   -0.001800   0.061937  0.003211 0.161180
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1572172
## R^2:  0.3662232
## LogLoss:  0.5177955
## AUC:  0.870991
## Gini:  0.7419819
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    325 131 0.287281   =131/456
## UP       73 471 0.134191    =73/544
## Totals  398 602 0.204000  =204/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.317053 0.821990 255
## 2                     max f2  0.058175 0.887179 345
## 3               max f0point5  0.887752 0.833333 103
## 4               max accuracy  0.454482 0.798000 223
## 5              max precision  0.993400 1.000000   0
## 6           max absolute_MCC  0.779720 0.598813 154
## 7 max min_per_class_accuracy  0.656022 0.786765 185
## 
## 
## 
## [[46]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_37 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 62,502 weights/biases, 498.1 KB, 35,198 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  5.00 %                                     
## 2     2   100 TanhDropout 50.00 % 0.000010 0.000010  0.009660 0.000000
## 3     3   300 TanhDropout 50.00 % 0.000010 0.000010  0.009660 0.000000
## 4     4   100 TanhDropout 50.00 % 0.000010 0.000010  0.009660 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.009660 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.501408   -0.000825   0.379402  0.009738 0.184564
## 3 0.501408    0.000469   0.073418  0.003138 0.024328
## 4 0.501408    0.000052   0.070064 -0.005376 0.025090
## 5 0.501408    0.034866   0.218840  0.000501 0.249063
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1547744
## R^2:  0.3760708
## LogLoss:  0.4879292
## AUC:  0.8672439
## Gini:  0.7344879
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    339 117 0.256579   =117/456
## UP       87 457 0.159926    =87/544
## Totals  426 574 0.204000  =204/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.443956 0.817531 221
## 2                     max f2  0.161765 0.886598 334
## 3               max f0point5  0.806223 0.829984 143
## 4               max accuracy  0.665209 0.799000 175
## 5              max precision  0.953715 1.000000   0
## 6           max absolute_MCC  0.806223 0.597013 143
## 7 max min_per_class_accuracy  0.665209 0.797794 175
## 
## 
## 
## [[47]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_13 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 27,452 weights/biases, 223.1 KB, 60,000 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  5.00 %                                     
## 2     2   100 TanhDropout 50.00 % 0.000010 0.000010  0.009994 0.000000
## 3     3   100 TanhDropout 50.00 % 0.000010 0.000010  0.009994 0.000000
## 4     4   150 TanhDropout 50.00 % 0.000010 0.000010  0.009994 0.000000
## 5     5     2     Softmax         0.000010 0.000010  0.009994 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.502400   -0.003168   0.406608  0.008546 0.143902
## 3 0.502400    0.000408   0.118265 -0.005543 0.044146
## 4 0.502400    0.000236   0.092947  0.000087 0.023300
## 5 0.502400    0.008255   0.116286 -0.007120 0.204662
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1719011
## R^2:  0.3070292
## LogLoss:  0.5648556
## AUC:  0.8555977
## Gini:  0.7111955
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    330 126 0.276316   =126/456
## UP       83 461 0.152574    =83/544
## Totals  413 587 0.209000  =209/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.401101 0.815208 223
## 2                     max f2  0.087869 0.883055 355
## 3               max f0point5  0.924140 0.818106 111
## 4               max accuracy  0.401101 0.791000 223
## 5              max precision  0.966309 1.000000   0
## 6           max absolute_MCC  0.803280 0.578448 166
## 7 max min_per_class_accuracy  0.803280 0.789474 166
## 
## 
## 
## [[48]]
## Model Details:
## ==============
## 
## H2OBinomialModel: deeplearning
## Model ID:  dl_grid_model_1457905851217_8_35 
## Status of Neuron Layers: predicting Class, 2-class classification, bernoulli distribution, CrossEntropy loss, 44,402 weights/biases, 355.3 KB, 72,224 training samples, mini-batch size 1
##   layer units        type dropout       l1       l2 mean_rate rate_RMS
## 1     1    18       Input  5.00 %                                     
## 2     2   200 TanhDropout 50.00 % 0.000010 0.000010  0.018653 0.000000
## 3     3   200 TanhDropout 50.00 % 0.000010 0.000010  0.018653 0.000000
## 4     4     2     Softmax         0.000010 0.000010  0.018653 0.000000
##   momentum mean_weight weight_RMS mean_bias bias_RMS
## 1                                                   
## 2 0.502889    0.000825   0.220521  0.007287 0.117229
## 3 0.502889    0.000357   0.067448 -0.004574 0.034544
## 4 0.502889    0.010720   0.140193 -0.001015 0.143632
## 
## 
## H2OBinomialMetrics: deeplearning
## ** Reported on training data. **
## Description: Metrics reported on temporary (load-balanced) training frame
## 
## MSE:  0.1557257
## R^2:  0.3722358
## LogLoss:  0.4854518
## AUC:  0.8619106
## Gini:  0.7238213
## 
## Confusion Matrix for F1-optimal threshold:
##        DOWN  UP    Error       Rate
## DOWN    331 125 0.274123   =125/456
## UP       85 459 0.156250    =85/544
## Totals  416 584 0.210000  =210/1000
## 
## Maximum Metrics: Maximum metrics at their respective thresholds
##                       metric threshold    value idx
## 1                     max f1  0.508937 0.813830 211
## 2                     max f2  0.120745 0.883745 337
## 3               max f0point5  0.830979 0.820730 111
## 4               max accuracy  0.663792 0.794000 168
## 5              max precision  0.976455 1.000000   0
## 6           max absolute_MCC  0.663792 0.586726 168
## 7 max min_per_class_accuracy  0.663792 0.792279 168
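The grid output above reports a training AUC for every candidate network, so one natural next step is to pull the strongest model back out of the grid. Below is a minimal sketch of that step; `dlGrid` is a hypothetical name standing in for whichever H2OGrid object the grid search earlier in the document was assigned to.

#Hedged sketch: rank the grid's models by training AUC and retrieve the best one
#dlGrid is a placeholder for the H2OGrid object created by the earlier grid search
library(h2o)

gridAUC <- sapply(dlGrid@model_ids, function(id) h2o.auc(h2o.getModel(id)))
#Training AUC of each model in the grid

bestDL <- h2o.getModel(dlGrid@model_ids[[which.max(gridAUC)]])
bestDL
#Print the details of the highest-AUC network

For comparison, the same classification problem is fit below with a single-hidden-layer network from the nnet package.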
library(nnet)

nn <- nnet(Class ~ ., data = TrainingSet, size = 2, maxit = 200)
#Fit a single-hidden-layer neural network with 2 hidden units on the training set
## # weights:  41
## initial  value 698.326213 
## iter  10 value 522.644052
## iter  20 value 455.995328
## iter  30 value 447.088181
## iter  40 value 439.559102
## iter  50 value 434.328688
## iter  60 value 431.843275
## iter  70 value 430.508417
## iter  80 value 429.779245
## iter  90 value 429.550510
## iter 100 value 428.979007
## iter 110 value 426.460252
## iter 120 value 424.066559
## iter 130 value 423.152778
## iter 140 value 422.200419
## iter 150 value 421.838685
## iter 160 value 421.337656
## iter 170 value 421.060114
## iter 180 value 420.954865
## iter 190 value 420.890612
## iter 200 value 420.781136
## final  value 420.781136 
## stopped after 200 iterations
nnPred<-predict(nn,TestSet,type = "class")
#Generate "UP"/"DOWN" class predictions for the held-out test set

confusionMatrix(nnPred,TestClass)
#Compare the network's predictions against the actual test-set classes
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction DOWN  UP
##       DOWN  184  91
##       UP     42 160
##                                          
##                Accuracy : 0.7212         
##                  95% CI : (0.6786, 0.761)
##     No Information Rate : 0.5262         
##     P-Value [Acc > NIR] : < 2.2e-16      
##                                          
##                   Kappa : 0.4468         
##  Mcnemar's Test P-Value : 3.153e-05      
##                                          
##             Sensitivity : 0.8142         
##             Specificity : 0.6375         
##          Pos Pred Value : 0.6691         
##          Neg Pred Value : 0.7921         
##              Prevalence : 0.4738         
##          Detection Rate : 0.3857         
##    Detection Prevalence : 0.5765         
##       Balanced Accuracy : 0.7258         
##                                          
##        'Positive' Class : DOWN           
##
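Rather than fixing size = 2, one could also let caret search over the hidden-layer size and weight decay. The sketch below assumes TrainingSet, TestSet, and TestClass are as used above; the tuning-grid values are illustrative, not recommendations.

library(caret)
library(nnet)

set.seed(1)
nnTune <- train(Class ~ ., data = TrainingSet,
                method = "nnet",
                tuneGrid = expand.grid(size = c(2, 5, 10),
                                       decay = c(0, 0.01, 0.1)),
                trControl = trainControl(method = "cv", number = 5),
                maxit = 200,
                trace = FALSE)
#Cross-validated grid search over hidden-layer size and weight decay;
#trace = FALSE suppresses the per-iteration log shown above

confusionMatrix(predict(nnTune, TestSet), TestClass)
#Evaluate the tuned network on the same held-out test set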