####R code to fit titrating delay discounting task data using the Mazur (1987), Myerson & Green (1999), and Rachlin (2001) hyperbolic and hyperboloid models####


#Mazur: V = A/(1+kD)

#Myerson & Green: V = A/((1+kD)^s)

#Rachlin: V = A/(1+kD^s)
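#For reference, the three discount functions can be written as small R helpers (illustrative only; the fits below specify these formulas inline, with A normalized to 1):
mazur_v   <- function(D, k)    1/(1 + k*D)        #Mazur hyperbola
mg_v      <- function(D, k, s) 1/((1 + k*D)^s)    #Myerson & Green hyperboloid
rachlin_v <- function(D, k, s) 1/(1 + k*D^s)      #Rachlin hyperboloid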

###Non-linear least squares is the fit method###

#Call in data-set
discounting <- read.table("Data.txt", header=TRUE, sep="\t", na.strings="NA", dec=".", strip.white=TRUE)
#This data-set contains six indifference points per participant from a money delay discounting task and can be separated by group (non-smoker = 0, smoker = 1).


#Call in necessary libraries
library(psych) #For descriptive statistics

library(nlmrt) #Non-linear regression; more robust than nls() when residual sums of squares are very close to 0 (nls() can fail in that case)

library(plyr) #Provides dlply()/llply() for looping over participants in the individual fits



####Group analysis####

#Create time variable for 1 week, 2 weeks, 1 month, 6 months, 5 years & 25 years. Units will be in months
time <- c(.25, .5, 1, 6, 60, 300) #Creates time variable


#Create "V" values for each commodity
describeBy(discounting) #Obtain descriptive statistics; the medians of Money_1-Money_6 are used below as the group indifference points (the warning simply notes that no grouping variable was given)
## Warning in describeBy(discounting): no grouping variable requested
##             vars  n  mean    sd median trimmed   mad  min max range  skew
## Participant    1 65 35.65 20.98  35.00   35.68 28.17 1.00  70 69.00 -0.01
## Smoker         2 65  0.49  0.50   0.00    0.49  0.00 0.00   1  1.00  0.03
## Money_1        3 65  0.83  0.26   0.95    0.88  0.07 0.00   1  1.00 -1.70
## Money_2        4 65  0.76  0.27   0.90    0.81  0.15 0.00   1  1.00 -1.10
## Money_3        5 65  0.72  0.30   0.84    0.76  0.22 0.00   1  1.00 -1.01
## Money_4        6 65  0.61  0.31   0.69    0.64  0.32 0.01   1  0.99 -0.42
## Money_5        7 65  0.39  0.35   0.25    0.36  0.37 0.00   1  1.00  0.41
## Money_6        8 65  0.18  0.26   0.06    0.13  0.09 0.00   1  1.00  1.67
##             kurtosis   se
## Participant    -1.34 2.60
## Smoker         -2.03 0.06
## Money_1         1.84 0.03
## Money_2         0.12 0.03
## Money_3        -0.25 0.04
## Money_4        -1.14 0.04
## Money_5        -1.39 0.04
## Money_6         1.94 0.03
head(discounting)
##   Participant Smoker Money_1 Money_2 Money_3 Money_4 Money_5 Money_6
## 1           2      0  0.9990  0.9795  0.7490  0.4932  0.0986  0.0400
## 2           3      0  0.9600  0.7490  0.7178  0.2393  0.1807  0.0557
## 3           5      0  0.9912  0.8994  0.9033  0.7295  0.6865  0.1943
## 4           6      0  0.9932  0.9033  0.9385  0.8760  0.5850  0.0029
## 5          10      0  0.9053  0.9014  0.9053  0.8896  0.3994  0.1943
## 6          11      0  0.9756  0.9502  0.9502  0.8994  0.7490  0.4990
money_indiff <- c(0.95, 0.90, 0.84, 0.69, 0.25, 0.06) #Median indifference points from the descriptives above


money_df <- data.frame(time, money_indiff) # Creates data frame for analysis
head(money_df)
##     time money_indiff
## 1   0.25         0.95
## 2   0.50         0.90
## 3   1.00         0.84
## 4   6.00         0.69
## 5  60.00         0.25
## 6 300.00         0.06
###Mazur group model fit###
Mazur_mod <- money_indiff ~ 1/(1+(k*time)) #Model; indifference points are proportions of the delayed amount, so A = 1
Mazur_fit <- wrapnls(Mazur_mod, start=list(k=0), data = money_df)

print(Mazur_fit)
## Nonlinear regression model
##   model: money_indiff ~ 1/(1 + (k * time))
##    data: data
##       k 
## 0.07222 
##  residual sum-of-squares: 0.01809
## 
## Number of iterations to convergence: 0 
## Achieved convergence tolerance: 1.413e-07
#Calculate r-squared value
RSS <- sum(residuals(Mazur_fit)^2) #Residual sum of squares

TSS <- sum((money_indiff - mean(money_indiff))^2) #Total sum of squares

1 - (RSS/TSS) #R-squared
## [1] 0.9738256
#AIC for model fit
AIC(Mazur_fit, k = 2) #Here k = 2 is the AIC penalty per parameter (the default), not the discounting rate
## [1] -13.79925
#Plot of model fit
predict(Mazur_fit) #Gives model predicted values
## [1] 0.98226443 0.96514701 0.93264165 0.69767161 0.18749797 0.04411709
plot(money_df) #Plots indifference points
time_range <- seq(0, 300, length = 1000) #Sets time range for best fit line.
lines(time_range, predict(Mazur_fit, data.frame(time = time_range))) #Adds best fit line
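#The Mazur model implies the indifference point falls to 0.5 at D = 1/k; a quick illustrative check using the fitted value:
1/coef(Mazur_fit)["k"] #Delay (in months) at which the predicted value is half the delayed amount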

###Rachlin###

Rach_mod <- money_indiff ~ 1/(1+k*(time^s)) #Model
start_values <- c(k = 0, s = 1) #Parameter start values

Rach_fit <- wrapnls(Rach_mod, start = start_values, data = money_df, upper = c(s=1), lower = c(k=0, s=0)) #wrapnls fits with nlxb and then refits with nls (port algorithm), so methods such as predict() and AIC() are available

print(Rach_fit)
## Nonlinear regression model
##   model: money_indiff ~ 1/(1 + k * (time^s))
##    data: data
##      k      s 
## 0.1409 0.7445 
##  residual sum-of-squares: 0.004363
## 
## Algorithm "port", convergence message: relative convergence (4)
#AIC
AIC(Rach_fit, k=2)
## [1] -20.33145
#Calculate r-squared values
RSS <- sum(residuals(Rach_fit)^2) #Residual sum of squares

TSS <- sum((money_indiff - mean(money_indiff))^2) #Total sum of squares

1 - (RSS/TSS) #R-squared
## [1] 0.9936861
#Plot of model fit
predict(Rach_fit) #Gives model predicted values
## [1] 0.95221770 0.92244709 0.87653306 0.65157803 0.25192720 0.09223596
plot(money_df) #Plots indifference points
time_range <- seq(0, 300, length = 1000) #Sets time range for best fit line.
lines(time_range, predict(Rach_fit, data.frame(time = time_range))) #Adds best fit line
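#For the Rachlin form the indifference point falls to 0.5 when k*D^s = 1, i.e. at D = (1/k)^(1/s); an illustrative check with the fitted parameters:
(1/coef(Rach_fit)["k"])^(1/coef(Rach_fit)["s"]) #Delay (in months) at which the predicted value is half the delayed amount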

###Myerson & Green###

MG_mod <- money_indiff ~ 1/((1+k*time)^s) #Model
start_values <- c(k = 0, s = 1) #Parameter start values

MG_fit <- wrapnls(MG_mod, start = start_values, data = money_df, lower = c(k=0, s=0)) #wrapnls fits with nlxb and then refits with nls (port algorithm)

print(MG_fit)
## Nonlinear regression model
##   model: money_indiff ~ 1/(1 + k * (time^s))
##    data: data
##      k      s 
## 0.1409 0.7445 
##  residual sum-of-squares: 0.004363
## 
## Algorithm "port", convergence message: both X-convergence and relative convergence (5)
#AIC
AIC(MG_fit, k=2)
## [1] -20.33145
#Calculate r-squared values
RSS <- sum(residuals(MG_fit)^2) #Residual sum of squares

TSS <- sum((money_indiff - mean(money_indiff))^2) #Total sum of squares

1 - (RSS/TSS) #R-squared
## [1] 0.9936861
#Plot of model fit
predict(MG_fit) #Gives model predicted values
## [1] 0.95221769 0.92244709 0.87653306 0.65157803 0.25192720 0.09223596
plot(money_df) #Plots indifference points
time_range <- seq(0, 300, length = 1000) #Sets time range for best fit line.
lines(time_range, predict(MG_fit, data.frame(time = time_range))) #Adds best fit line

###Compare model fits###
anova(Mazur_fit, Rach_fit)
## Analysis of Variance Table
## 
## Model 1: money_indiff ~ 1/(1 + (k * time))
## Model 2: money_indiff ~ 1/(1 + k * (time^s))
##   Res.Df Res.Sum Sq Df   Sum Sq F value  Pr(>F)  
## 1      5  0.0180852                              
## 2      4  0.0043626  1 0.013723  12.582 0.02386 *
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
anova(Mazur_fit, MG_fit)
## Analysis of Variance Table
## 
## Model 1: money_indiff ~ 1/(1 + (k * time))
## Model 2: money_indiff ~ 1/(1 + k * (time^s))
##   Res.Df Res.Sum Sq Df   Sum Sq F value  Pr(>F)  
## 1      5  0.0180852                              
## 2      4  0.0043626  1 0.013723  12.582 0.02386 *
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
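#The three group fits can also be lined up in a single AIC table (an extra step, not shown in the output above):
AIC(Mazur_fit, Rach_fit, MG_fit)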
###Graph all three model fits on one plot###
plot(money_df) #Plots indifference points
time_range <- seq(0, 300, length = 50) #Sets time range for best fit line.
lines(time_range, predict(Mazur_fit, data.frame(time = time_range)), col = "green") #Adds Mazur best fit line
lines(time_range, predict(Rach_fit, data.frame(time = time_range)), col = "skyblue") #Adds Rachlin best fit line
lines(time_range, predict(MG_fit, data.frame(time = time_range)), col = "purple") #Adds Myerson & Green best fit line
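#A legend makes the three fit lines identifiable (an optional addition; colours match the lines drawn above):
legend("topright", legend = c("Mazur", "Rachlin", "Myerson & Green"), col = c("green", "skyblue", "purple"), lty = 1)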

####Non-linear least squares fits for individual data (using nlxb from nlmrt)####


###Mazur###

#Create long-format data-set
discounting_long <- reshape(discounting, idvar = "Participant", varying = c("Money_1", "Money_2", "Money_3", "Money_4", "Money_5", "Money_6"), timevar = "time", direction = "long", v.names = "indiff")

#Order Data
osub <- order(as.integer(discounting_long$Participant))
discounting_long <- discounting_long[osub, ]

#Convert time variable to units of months; this recycles the six delay values and relies on the data being ordered by Participant (time codes 1-6 within each participant)
discounting_long$time <- c("1" = ".25", "2" = ".5", "3" = "1", "4" = "6", "5" = "60", "6" = "300")

discounting_long$time <- as.numeric(discounting_long$time)
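#An order-independent alternative to the two conversion lines above maps the reshape time codes 1-6 straight to months (shown commented out because time has already been converted):
#discounting_long$time <- c(0.25, 0.5, 1, 6, 60, 300)[as.integer(discounting_long$time)]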

#Model Fit
demo_maz <- dlply(discounting_long, .(Participant), function(discounting_long) {nlxb(indiff ~ 1/(1+(k*time)), data = discounting_long, start = c(k=0))})
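#A single participant's fit can also be checked outside the loop (illustrative; participant 2 appears in head(discounting) above):
nlxb(indiff ~ 1/(1+(k*time)), data = subset(discounting_long, Participant == 2), start = c(k=0))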

print(demo_maz)
## $`1`
## nlmrt class object: x 
## residual sumsquares =  0.022662  on  6 observations
##     after  14    Jacobian and  15 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.108837       0.02557      4.256   0.008044  -4.338e-13       2.633  
## 
## $`2`
## nlmrt class object: x 
## residual sumsquares =  0.016104  on  6 observations
##     after  11    Jacobian and  12 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.183549       0.03412      5.379   0.002992  -6.305e-15       1.663  
## 
## $`3`
## nlmrt class object: x 
## residual sumsquares =  0.033374  on  6 observations
##     after  11    Jacobian and  12 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.447171        0.1071      4.177   0.008681  -1.395e-15      0.7631  
## 
## $`4`
## nlmrt class object: x 
## residual sumsquares =  0.46128  on  6 observations
##     after  69    Jacobian and  70 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.330196        0.3074      1.074     0.3318  -7.337e-14      0.9882  
## 
## $`5`
## nlmrt class object: x 
## residual sumsquares =  0.068494  on  6 observations
##     after  8    Jacobian and  9 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0110324       0.00424      2.602    0.04814  -3.244e-12        27.6  
## 
## $`6`
## nlmrt class object: x 
## residual sumsquares =  0.044677  on  6 observations
##     after  9    Jacobian and  10 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0176498       0.00562      3.141    0.02565  -2.338e-12       16.82  
## 
## $`7`
## nlmrt class object: x 
## residual sumsquares =  0.097796  on  6 observations
##     after  14    Jacobian and  15 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                2.22862        0.7676      2.904    0.03365  -9.378e-14      0.1822  
## 
## $`8`
## nlmrt class object: x 
## residual sumsquares =  0.15529  on  6 observations
##     after  13    Jacobian and  14 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.483239        0.2464      1.962     0.1071  -9.363e-15      0.7154  
## 
## $`9`
## nlmrt class object: x 
## residual sumsquares =  0.46543  on  6 observations
##     after  26    Jacobian and  27 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                1.50746         1.132      1.332     0.2403  -1.249e-14      0.2696  
## 
## $`10`
## nlmrt class object: x 
## residual sumsquares =  0.025874  on  6 observations
##     after  10    Jacobian and  11 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0228473      0.005681      4.022     0.0101   -4.41e-13       12.66  
## 
## $`11`
## nlmrt class object: x 
## residual sumsquares =  0.016163  on  6 observations
##     after  9    Jacobian and  10 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k             0.00400063     0.0007739      5.169   0.003557  -1.271e-12       73.47  
## 
## $`12`
## nlmrt class object: x 
## residual sumsquares =  8.9979e-06  on  6 observations
##     after  3    Jacobian and  4 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k            1.12772e-05     4.413e-06      2.556    0.05092  -5.388e-11         304  
## 
## $`13`
## nlmrt class object: x 
## residual sumsquares =  0.13668  on  6 observations
##     after  9    Jacobian and  10 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0134456      0.007352      1.829      0.127   4.752e-14       22.49  
## 
## $`14`
## nlmrt class object: x 
## residual sumsquares =  0.053949  on  6 observations
##     after  12    Jacobian and  13 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                1.38823        0.3565      3.894    0.01148  -9.281e-16      0.2914  
## 
## $`15`
## nlmrt class object: x 
## residual sumsquares =  0.020268  on  6 observations
##     after  8    Jacobian and  9 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k             0.00768988      0.001605      4.791   0.004925  -4.121e-12       39.66  
## 
## $`16`
## nlmrt class object: x 
## residual sumsquares =  0.010586  on  6 observations
##     after  8    Jacobian and  9 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0076371      0.001152      6.628   0.001177  -3.322e-12       39.93  
## 
## $`17`
## nlmrt class object: x 
## residual sumsquares =  0.0062987  on  6 observations
##     after  10    Jacobian and  11 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.475297       0.04894      9.713  0.0001966   -2.01e-14      0.7253  
## 
## $`18`
## nlmrt class object: x 
## residual sumsquares =  0.24443  on  6 observations
##     after  13    Jacobian and  14 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                2.12134         1.152      1.841      0.125   -1.79e-16      0.1919  
## 
## $`20`
## nlmrt class object: x 
## residual sumsquares =  4.5553e-06  on  6 observations
##     after  3    Jacobian and  4 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k            3.93299e-06     3.126e-06      1.258     0.2639  -2.924e-12       305.3  
## 
## $`21`
## nlmrt class object: x 
## residual sumsquares =  0.035844  on  6 observations
##     after  10    Jacobian and  11 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0103825      0.002883      3.601    0.01553  -1.611e-12       29.37  
## 
## $`22`
## nlmrt class object: x 
## residual sumsquares =  0.0055937  on  6 observations
##     after  10    Jacobian and  11 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0146828      0.001632      8.996  0.0002832  -2.146e-13       20.49  
## 
## $`23`
## nlmrt class object: x 
## residual sumsquares =  0.0053663  on  6 observations
##     after  9    Jacobian and  10 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0525462      0.006379      8.237  0.0004298  -1.255e-14       5.135  
## 
## $`24`
## nlmrt class object: x 
## residual sumsquares =  0.058326  on  6 observations
##     after  12    Jacobian and  13 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0286927         0.011      2.609    0.04773  -2.345e-12        9.82  
## 
## $`25`
## nlmrt class object: x 
## residual sumsquares =  0.074321  on  6 observations
##     after  14    Jacobian and  15 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                4.17175         1.379      3.026    0.02921   -1.23e-14     0.08844  
## 
## $`26`
## nlmrt class object: x 
## residual sumsquares =  0.12145  on  6 observations
##     after  11    Jacobian and  12 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                0.09026       0.05025      1.796     0.1324  -3.303e-15       3.101  
## 
## $`27`
## nlmrt class object: x 
## residual sumsquares =  0.19303  on  6 observations
##     after  19    Jacobian and  20 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                1.11144        0.5513      2.016    0.09988  -1.266e-14      0.3564  
## 
## $`28`
## nlmrt class object: x 
## residual sumsquares =  0.080014  on  6 observations
##     after  21    Jacobian and  22 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.331772        0.1285      2.581    0.04938  -4.962e-14      0.9841  
## 
## $`30`
## nlmrt class object: x 
## residual sumsquares =  0.0092946  on  6 observations
##     after  12    Jacobian and  13 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.192038       0.02699      7.116  0.0008501  -1.334e-13       1.598  
## 
## $`31`
## nlmrt class object: x 
## residual sumsquares =  0.045753  on  6 observations
##     after  11    Jacobian and  12 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k             0.00609215      0.001923      3.169    0.02485  -2.541e-12       49.76  
## 
## $`32`
## nlmrt class object: x 
## residual sumsquares =  0.33072  on  6 observations
##     after  23    Jacobian and  24 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                1.05509        0.6894       1.53     0.1865  -5.127e-14      0.3731  
## 
## $`33`
## nlmrt class object: x 
## residual sumsquares =  0.043276  on  6 observations
##     after  15    Jacobian and  16 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0824112       0.02767      2.978    0.03086  -2.738e-13       3.362  
## 
## $`34`
## nlmrt class object: x 
## residual sumsquares =  0.015182  on  6 observations
##     after  7    Jacobian and  8 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k            0.000765989     0.0002676      2.862    0.03532   -1.54e-12       205.9  
## 
## $`35`
## nlmrt class object: x 
## residual sumsquares =  0.0042475  on  6 observations
##     after  14    Jacobian and  15 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                30.8355         7.449       4.14      0.009  -1.181e-14    0.003913  
## 
## $`37`
## nlmrt class object: x 
## residual sumsquares =  0.25807  on  6 observations
##     after  19    Jacobian and  20 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                8.98938         7.407      1.214     0.2791  -2.497e-15     0.03067  
## 
## $`38`
## nlmrt class object: x 
## residual sumsquares =  0.21868  on  6 observations
##     after  12    Jacobian and  13 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0168446       0.01182      1.425     0.2135  -4.717e-13       17.69  
## 
## $`39`
## nlmrt class object: x 
## residual sumsquares =  0.35841  on  6 observations
##     after  20    Jacobian and  21 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                2.63613         1.762      1.496     0.1949  -8.552e-14      0.1519  
## 
## $`41`
## nlmrt class object: x 
## residual sumsquares =  0.09118  on  6 observations
##     after  16    Jacobian and  17 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                2.79602        0.9492      2.946    0.03204  -1.736e-14      0.1423  
## 
## $`42`
## nlmrt class object: x 
## residual sumsquares =  0.1561  on  6 observations
##     after  9    Jacobian and  10 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0441771        0.0288      1.534     0.1857  -7.971e-14       6.135  
## 
## $`43`
## nlmrt class object: x 
## residual sumsquares =  0.18224  on  6 observations
##     after  18    Jacobian and  19 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.717919        0.3696      1.942     0.1097  -9.832e-14      0.5165  
## 
## $`45`
## nlmrt class object: x 
## residual sumsquares =  0.029859  on  6 observations
##     after  13    Jacobian and  14 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                3.77879        0.7732      4.887   0.004523  -3.738e-16     0.09995  
## 
## $`46`
## nlmrt class object: x 
## residual sumsquares =  0.0075843  on  6 observations
##     after  10    Jacobian and  11 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.100203       0.01376       7.28  0.0007649  -4.136e-13        2.83  
## 
## $`47`
## nlmrt class object: x 
## residual sumsquares =  0.052447  on  6 observations
##     after  13    Jacobian and  14 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0119207      0.004018      2.967    0.03127  -6.612e-13       25.49  
## 
## $`48`
## nlmrt class object: x 
## residual sumsquares =  0.018958  on  6 observations
##     after  10    Jacobian and  11 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.449027       0.08097      5.546   0.002618  -4.657e-16      0.7605  
## 
## $`49`
## nlmrt class object: x 
## residual sumsquares =  0.013958  on  6 observations
##     after  12    Jacobian and  13 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0938871       0.01764      5.323   0.003132  -5.456e-14       2.996  
## 
## $`50`
## nlmrt class object: x 
## residual sumsquares =  0.053247  on  6 observations
##     after  10    Jacobian and  11 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.558303        0.1625      3.435    0.01853  -5.563e-14       0.635  
## 
## $`51`
## nlmrt class object: x 
## residual sumsquares =  0.17349  on  6 observations
##     after  8    Jacobian and  9 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k             0.00267779      0.001815      1.475     0.2002   3.562e-13       102.6  
## 
## $`52`
## nlmrt class object: x 
## residual sumsquares =  0.0031697  on  6 observations
##     after  12    Jacobian and  13 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                9.13645        0.8412      10.86  0.0001149   -2.62e-14     0.02993  
## 
## $`53`
## nlmrt class object: x 
## residual sumsquares =  0.017573  on  6 observations
##     after  8    Jacobian and  9 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k             0.00269219     0.0005802       4.64    0.00563  -1.736e-12       102.2  
## 
## $`54`
## nlmrt class object: x 
## residual sumsquares =  0.022575  on  6 observations
##     after  12    Jacobian and  13 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.181582       0.04001      4.538    0.00618  -4.223e-14       1.679  
## 
## $`55`
## nlmrt class object: x 
## residual sumsquares =  0.0029666  on  6 observations
##     after  7    Jacobian and  8 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k             0.00184613     0.0001824      10.12  0.0001612  -5.138e-13       133.6  
## 
## $`56`
## nlmrt class object: x 
## residual sumsquares =  0.013142  on  6 observations
##     after  9    Jacobian and  10 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k             0.00975944       0.00164      5.952   0.001913  -1.539e-12       31.27  
## 
## $`57`
## nlmrt class object: x 
## residual sumsquares =  0.0065008  on  6 observations
##     after  7    Jacobian and  8 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k             0.00159948     0.0002471      6.474   0.001311  -5.216e-12       145.9  
## 
## $`58`
## nlmrt class object: x 
## residual sumsquares =  0.036117  on  6 observations
##     after  13    Jacobian and  14 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0205019      0.005953      3.444    0.01836  -1.117e-12       14.28  
## 
## $`59`
## nlmrt class object: x 
## residual sumsquares =  0.039916  on  6 observations
##     after  16    Jacobian and  17 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0345404       0.01118      3.091    0.02714  -5.521e-13       7.995  
## 
## $`60`
## nlmrt class object: x 
## residual sumsquares =  0.0012736  on  6 observations
##     after  7    Jacobian and  8 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k             0.00350522     0.0001937       18.1  9.456e-06  -2.493e-12       82.42  
## 
## $`61`
## nlmrt class object: x 
## residual sumsquares =  0.013575  on  6 observations
##     after  7    Jacobian and  8 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k             0.00154076     0.0003493      4.411   0.006951  -1.156e-12       149.2  
## 
## $`62`
## nlmrt class object: x 
## residual sumsquares =  0.038852  on  6 observations
##     after  14    Jacobian and  15 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0779867       0.02495      3.126    0.02609  -1.351e-13       3.533  
## 
## $`63`
## nlmrt class object: x 
## residual sumsquares =  0.09898  on  6 observations
##     after  20    Jacobian and  21 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.576119        0.2274      2.534    0.05228  -6.437e-14      0.6188  
## 
## $`64`
## nlmrt class object: x 
## residual sumsquares =  0.010951  on  6 observations
##     after  9    Jacobian and  10 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                0.13733       0.02178      6.306   0.001476  -3.172e-13       2.149  
## 
## $`65`
## nlmrt class object: x 
## residual sumsquares =  0.05325  on  6 observations
##     after  13    Jacobian and  14 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0063602      0.002162      2.942    0.03218  -4.752e-13       47.74  
## 
## $`66`
## nlmrt class object: x 
## residual sumsquares =  0.14098  on  6 observations
##     after  16    Jacobian and  17 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                7.22376          3.97       1.82     0.1285  -5.888e-15      0.0423  
## 
## $`67`
## nlmrt class object: x 
## residual sumsquares =  0.046222  on  6 observations
##     after  16    Jacobian and  17 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.454201        0.1276      3.558    0.01624  -1.587e-13      0.7532  
## 
## $`68`
## nlmrt class object: x 
## residual sumsquares =  0.035406  on  6 observations
##     after  9    Jacobian and  10 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0018987     0.0006416      2.959    0.03155  -2.627e-12       131.2  
## 
## $`69`
## nlmrt class object: x 
## residual sumsquares =  0.93866  on  6 observations
##     after  13    Jacobian and  14 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0209325       0.03105     0.6741     0.5302  -4.496e-12       13.95  
## 
## $`70`
## nlmrt class object: x 
## residual sumsquares =  0.015843  on  6 observations
##     after  21    Jacobian and  22 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                741.772          6817     0.1088     0.9176  -3.157e-14   8.257e-06  
## 
## attr(,"split_type")
## [1] "data.frame"
## attr(,"split_labels")
##    Participant
## 1            1
## 2            2
## 3            3
## 4            4
## 5            5
## 6            6
## 7            7
## 8            8
## 9            9
## 10          10
## 11          11
## 12          12
## 13          13
## 14          14
## 15          15
## 16          16
## 17          17
## 18          18
## 19          20
## 20          21
## 21          22
## 22          23
## 23          24
## 24          25
## 25          26
## 26          27
## 27          28
## 28          30
## 29          31
## 30          32
## 31          33
## 32          34
## 33          35
## 34          37
## 35          38
## 36          39
## 37          41
## 38          42
## 39          43
## 40          45
## 41          46
## 42          47
## 43          48
## 44          49
## 45          50
## 46          51
## 47          52
## 48          53
## 49          54
## 50          55
## 51          56
## 52          57
## 53          58
## 54          59
## 55          60
## 56          61
## 57          62
## 58          63
## 59          64
## 60          65
## 61          66
## 62          67
## 63          68
## 64          69
## 65          70
#Obtain parameter values
coef_Maz <- llply(demo_maz, coef) #Extract each participant's fitted k from the model objects
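#The list of fitted k values can be flattened for a quick look at their distribution (illustrative):
summary(unlist(coef_Maz))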




#Create new dataset with model parameters
discounting_vars <- c("Participant", "Smoker", "Money_1", "Money_2", "Money_3", "Money_4", "Money_5", "Money_6")

osub <- order(as.integer(discounting$Participant))
discounting <- discounting[osub,]

discounting_clean <- discounting[discounting_vars]

discounting_clean$k <- coef_Maz #Attach each participant's fitted k (same Participant order as the dlply output)

discounting_long_Maz <- reshape(discounting_clean, idvar = "Participant", varying = c("Money_1", "Money_2", "Money_3", "Money_4", "Money_5", "Money_6"), timevar = "time", direction = "long", v.names = "indiff")


#Order Data
osub <- order(as.integer(discounting_long_Maz$Participant))
discounting_long_Maz <- discounting_long_Maz[osub, ]

#Convert time variable to units of months (as above, this relies on the Participant ordering)
discounting_long_Maz$time <- c("1" = ".25", "2" = ".5", "3" = "1", "4" = "6", "5" = "60", "6" = "300")

discounting_long_Maz$time <- as.numeric(discounting_long_Maz$time)

discounting_long_Maz$k <- as.numeric(discounting_long_Maz$k)


#Create variables needed for r-squared and AIC calculations
discounting_long_Maz$predict <- 1/(1+discounting_long_Maz$time*discounting_long_Maz$k)

discounting_long_Maz$resid <- discounting_long_Maz$indiff - discounting_long_Maz$predict

#Create final wide data-set
discounting_wide_Maz <- reshape(discounting_long_Maz, idvar = c("Participant", "Smoker"), timevar = "time", direction = "wide") #Change data-set into wide format

#R-squared calculation
#Mean Indifference Point
discounting_wide_Maz$mean_indiff <- (discounting_wide_Maz$indiff.0.25 + discounting_wide_Maz$indiff.0.5 + discounting_wide_Maz$indiff.1 + discounting_wide_Maz$indiff.6 + discounting_wide_Maz$indiff.60 + discounting_wide_Maz$indiff.300)/6


#Total sum of squares
discounting_wide_Maz$TSS <-((discounting_wide_Maz$indiff.0.25 - discounting_wide_Maz$mean_indiff)^2 + (discounting_wide_Maz$indiff.0.5 - discounting_wide_Maz$mean_indiff)^2 + (discounting_wide_Maz$indiff.1 - discounting_wide_Maz$mean_indiff)^2 + (discounting_wide_Maz$indiff.6 - discounting_wide_Maz$mean_indiff)^2 + (discounting_wide_Maz$indiff.60 - discounting_wide_Maz$mean_indiff)^2 + (discounting_wide_Maz$indiff.300 - discounting_wide_Maz$mean_indiff)^2) 

#Residual sum of squares
discounting_wide_Maz$RSS <- (discounting_wide_Maz$resid.0.25^2 + discounting_wide_Maz$resid.0.5^2 + discounting_wide_Maz$resid.1^2 + discounting_wide_Maz$resid.6^2 + discounting_wide_Maz$resid.60^2 + discounting_wide_Maz$resid.300^2)

discounting_wide_Maz$r_squared <- (1 - (discounting_wide_Maz$RSS/discounting_wide_Maz$TSS))
#Note: r-squared can be negative when a participant's RSS exceeds the TSS, and is -Inf when the indifference points are constant (TSS = 0)

median(discounting_wide_Maz$r_squared, na.rm = FALSE)
## [1] -1.3289
describeBy(discounting_wide_Maz$r_squared, group = discounting_wide_Maz$Smoker)
## 
##  Descriptive statistics by group 
## group: 0
##    vars  n mean  sd median trimmed  mad  min  max range skew kurtosis  se
## X1    1 33 -Inf NaN  -1.08   -0.93 0.97 -Inf 0.78   Inf  NaN      NaN NaN
## ------------------------------------------------------------ 
## group: 1
##    vars  n  mean   sd median trimmed  mad   min  max range  skew kurtosis   se
## X1    1 32 -1.68 1.73  -1.78   -1.46 1.61 -7.98 0.45  8.43 -1.48     3.19 0.31
#AIC for model fit. For the individual fits it is calculated by hand; AICc is used because of the small n (six indifference points per participant).

discounting_wide_Maz$AICc <- -2*log(discounting_wide_Maz$RSS) + 2*1 + (2*1*(1+1))/(6-1-1)
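#A common small-sample AIC formulation for least-squares fits uses n*log(RSS/n) for the deviance term; a sketch of that variant, assuming K = 2 estimated quantities (the discounting rate k plus the error variance):
discounting_wide_Maz$AICc_alt <- 6*log(discounting_wide_Maz$RSS/6) + 2*2 + (2*2*(2+1))/(6-2-1)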

median(discounting_wide_Maz$AICc, na.rm = FALSE)
## [1] 3.924
describeBy(discounting_wide_Maz$AICc, group = discounting_wide_Maz$Smoker)
## 
##  Descriptive statistics by group 
## group: 0
##    vars  n mean   sd median trimmed  mad  min  max range skew kurtosis   se
## X1    1 33 5.28 5.79   4.07    4.11 3.16 0.56 27.6 27.04 2.71     7.25 1.01
## ------------------------------------------------------------ 
## group: 1
##    vars  n mean   sd median trimmed  mad  min   max range skew kurtosis   se
## X1    1 32 4.11 2.44   3.89    3.79 1.58 1.08 11.89 10.81 1.63     2.99 0.43
#Calculate AUC
time_AUC <- c(.25/300, .5/300, 1/300, 6/300, 60/300, 300/300) #Normalized delays (delay/maximum delay) used in the AUC calculation


attach(discounting_wide_Maz)
## The following objects are masked _by_ .GlobalEnv:
## 
##     RSS, TSS
discounting_wide_Maz$AUC <- ((0.00083333-0)*((1 + indiff.0.25)/2)) +
  ((0.00166667-0.00083333)*((indiff.0.25 + indiff.0.5)/2)) +
  ((0.00333333-0.00166667)*((indiff.0.5 + indiff.1)/2)) +
  ((0.02000000-0.00333333)*((indiff.1 + indiff.6)/2)) +
  ((0.20000000-0.02000000)*((indiff.6 + indiff.60)/2)) +
  ((1-0.20000000)*((indiff.60 + indiff.300)/2)) #Trapezoidal AUC over normalized delays; the value at delay 0 is taken to be 1
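#A compact alternative is a small trapezoid-rule helper applied to the normalized delays (a sketch; auc_trap is an illustrative function, not part of the original workflow). Shown on the group medians, taking the value at delay 0 to be 1:
auc_trap <- function(x, y) sum(diff(x) * (head(y, -1) + tail(y, -1))/2)
auc_trap(c(0, time_AUC), c(1, money_indiff))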
###Rachlin###


#Create long-format data-set
discounting_long <- reshape(discounting, idvar = "Participant", varying = c("Money_1", "Money_2", "Money_3", "Money_4", "Money_5", "Money_6"), timevar = "time", direction = "long", v.names = "indiff")



#Order Data
osub <- order(as.integer(discounting_long$Participant))
discounting_long <- discounting_long[osub, ]


#Convert time variable to units of months (relies on the Participant ordering above)
discounting_long$time <- c("1" = ".25", "2" = ".5", "3" = "1", "4" = "6", "5" = "60", "6" = "300")

discounting_long$time <- as.numeric(discounting_long$time)
#Model fit
demo_Rach <- dlply(discounting_long, .(Participant), function(discounting_long) {nlxb(indiff ~ 1/(1+(k*time^s)), data = discounting_long, start = c(k=0, s=1), lower = c(s=0), upper = c(s=1))})
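#As with the Mazur fits, a single participant can be checked outside the loop (illustrative; participant 2 appears in head(discounting) above):
nlxb(indiff ~ 1/(1+(k*time^s)), data = subset(discounting_long, Participant == 2), start = c(k=0, s=1), lower = c(s=0), upper = c(s=1))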

print(demo_Rach)
## $`1`
## nlmrt class object: x 
## residual sumsquares =  0.0082136  on  6 observations
##     after  14    Jacobian and  15 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                0.16927       0.03525      4.802   0.008634  -7.432e-15       2.148  
## s               0.743286       0.07691      9.665  0.0006413  -5.954e-15      0.5531  
## 
## $`2`
## nlmrt class object: x 
## residual sumsquares =  0.015562  on  6 observations
##     after  13    Jacobian and  14 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.198285       0.05808      3.414    0.02692  -4.495e-14        1.65  
## s               0.941678        0.1554      6.058   0.003747  -2.115e-14      0.3861  
## 
## $`3`
## nlmrt class object: x 
## residual sumsquares =  0.032256  on  6 observations
##     after  18    Jacobian and  19 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.459613        0.1223      3.759    0.01979  -2.291e-14      0.8063  
## s                0.89997        0.2036      4.421     0.0115  -4.397e-14      0.4281  
## 
## $`4`
## nlmrt class object: x 
## residual sumsquares =  0.010321  on  6 observations
##     after  13    Jacobian and  19 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.557551       0.06091      9.153  0.0007907   8.042e-13       1.902  
## s               0.179941       0.03491      5.154   0.006725  -1.606e-11      0.7823  
## 
## $`5`
## nlmrt class object: x 
## residual sumsquares =  0.047307  on  6 observations
##     after  17    Jacobian and  22 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0851665       0.06087      1.399     0.2343  -3.158e-12       4.651  
## s               0.573246        0.1662      3.448     0.0261  -2.396e-13      0.6197  
## 
## $`6`
## nlmrt class object: x 
## residual sumsquares =  0.044677  on  6 observations
##     after  9    Jacobian and  10 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0176498            NA         NA         NA  -2.338e-12       16.82  
## s                      1U           NA         NA         NA           0           0  
## 
## $`7`
## nlmrt class object: x 
## residual sumsquares =  0.22292  on  6 observations
##     after  7    Jacobian and  7 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                      1U           NA         NA         NA           0           0  
## s                      1U           NA         NA         NA           0           0  
## 
## $`8`
## nlmrt class object: x 
## residual sumsquares =  0.14532  on  6 observations
##     after  16    Jacobian and  17 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.520981        0.2635      1.977     0.1192   -2.24e-14      0.7971  
## s                0.74678        0.3506       2.13     0.1002  -5.624e-14      0.5184  
## 
## $`9`
## nlmrt class object: x 
## residual sumsquares =  0.10985  on  6 observations
##     after  16    Jacobian and  22 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                      1U           NA         NA         NA           0        1.46  
## s               0.205914            NA         NA         NA  -4.402e-12           0  
## 
## $`10`
## nlmrt class object: x 
## residual sumsquares =  0.018241  on  6 observations
##     after  15    Jacobian and  16 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0623946       0.03313      1.884     0.1327  -7.102e-13       5.508  
## s               0.744432         0.131      5.681   0.004738  -1.806e-13      0.5017  
## 
## $`11`
## nlmrt class object: x 
## residual sumsquares =  0.0017343  on  6 observations
##     after  15    Jacobian and  20 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0391996      0.008983      4.364    0.01203    -8.8e-12       8.703  
## s               0.557789       0.04652      11.99  0.0002772   1.688e-12      0.4401  
## 
## $`12`
## nlmrt class object: x 
## residual sumsquares =  6.8644e-07  on  6 observations
##     after  12    Jacobian and  15 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k             0.00108043     0.0002057      5.252   0.006291  -1.393e-12       4.143  
## s               0.186597       0.04191      4.452    0.01123  -1.319e-12    0.009884  
## 
## $`13`
## nlmrt class object: x 
## residual sumsquares =  0.13668  on  6 observations
##     after  13    Jacobian and  14 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0134456            NA         NA         NA  -5.696e-09       22.49  
## s                      1U           NA         NA         NA           0           0  
## 
## $`14`
## nlmrt class object: x 
## residual sumsquares =  0.054668  on  6 observations
##     after  15    Jacobian and  16 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                      1U           NA         NA         NA           0      0.5096  
## s               0.690485            NA         NA         NA  -8.771e-14           0  
## 
## $`15`
## nlmrt class object: x 
## residual sumsquares =  0.020268  on  6 observations
##     after  8    Jacobian and  9 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k             0.00768988            NA         NA         NA  -4.121e-12       39.66  
## s                      1U           NA         NA         NA           0           0  
## 
## $`16`
## nlmrt class object: x 
## residual sumsquares =  0.010586  on  6 observations
##     after  8    Jacobian and  9 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0076371            NA         NA         NA  -3.322e-12       39.93  
## s                      1U           NA         NA         NA           0           0  
## 
## $`17`
## nlmrt class object: x 
## residual sumsquares =  0.0061122  on  6 observations
##     after  11    Jacobian and  12 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                0.48146       0.05608      8.585   0.001011  -3.635e-15       0.745  
## s               0.964658       0.09625      10.02  0.0005572  -4.774e-15      0.3978  
## 
## $`18`
## nlmrt class object: x 
## residual sumsquares =  0.21911  on  6 observations
##     after  14    Jacobian and  20 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                      1U           NA         NA         NA           0      0.8598  
## s               0.423327            NA         NA         NA   4.414e-11           0  
## 
## $`20`
## nlmrt class object: x 
## residual sumsquares =  1.9137e-27  on  6 observations
##     after  11    Jacobian and  13 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.001001     1.051e-14  9.529e+10          0  -4.432e-16       2.445  
## s             6.9493e-12     3.469e-12      2.003     0.1157   2.751e-16    0.006305  
## 
## $`21`
## nlmrt class object: x 
## residual sumsquares =  0.035844  on  6 observations
##     after  10    Jacobian and  11 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0103825            NA         NA         NA  -1.611e-12       29.37  
## s                      1U           NA         NA         NA           0           0  
## 
## $`22`
## nlmrt class object: x 
## residual sumsquares =  0.0018376  on  6 observations
##     after  11    Jacobian and  16 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0303234      0.007323      4.141    0.01436    7.77e-10       10.76  
## s               0.829359       0.05432      15.27  0.0001073  -1.826e-10      0.3913  
## 
## $`23`
## nlmrt class object: x 
## residual sumsquares =  0.0053663  on  6 observations
##     after  9    Jacobian and  10 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0525462            NA         NA         NA  -1.255e-14       5.135  
## s                      1U           NA         NA         NA           0           0  
## 
## $`24`
## nlmrt class object: x 
## residual sumsquares =  0.044989  on  6 observations
##     after  15    Jacobian and  20 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0819775        0.0589      1.392     0.2364  -3.695e-12       4.205  
## s               0.744059        0.1927      3.861    0.01813   2.078e-12      0.5305  
## 
## $`25`
## nlmrt class object: x 
## residual sumsquares =  0.42167  on  6 observations
##     after  18    Jacobian and  23 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                      1U           NA         NA         NA           0      0.5024  
## s               0.700311            NA         NA         NA  -2.312e-14           0  
## 
## $`26`
## nlmrt class object: x 
## residual sumsquares =  0.10202  on  6 observations
##     after  19    Jacobian and  20 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.216078        0.1338      1.615     0.1816  -1.829e-13       1.921  
## s               0.640671        0.2224      2.881    0.04497  -1.324e-13      0.6496  
## 
## $`27`
## nlmrt class object: x 
## residual sumsquares =  0.026361  on  6 observations
##     after  13    Jacobian and  19 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.874285        0.1423      6.143    0.00356  -7.294e-12       1.276  
## s               0.300115       0.06984      4.297    0.01267  -1.222e-11      0.5591  
## 
## $`28`
## nlmrt class object: x 
## residual sumsquares =  0.0037712  on  6 observations
##     after  12    Jacobian and  18 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.486082       0.03746      12.98  0.0002034  -2.123e-10        1.12  
## s                0.52863       0.03659      14.45  0.0001334   1.767e-10      0.6884  
## 
## $`30`
## nlmrt class object: x 
## residual sumsquares =  0.0031697  on  6 observations
##     after  12    Jacobian and  13 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.246614       0.02721      9.062  0.0008218  -1.122e-14       1.462  
## s               0.822643        0.0563      14.61  0.0001276  -1.097e-14      0.4732  
## 
## $`31`
## nlmrt class object: x 
## residual sumsquares =  0.045753  on  6 observations
##     after  11    Jacobian and  12 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k             0.00609215            NA         NA         NA  -2.541e-12       49.76  
## s                      1U           NA         NA         NA           0           0  
## 
## $`32`
## nlmrt class object: x 
## residual sumsquares =  0.15685  on  6 observations
##     after  12    Jacobian and  17 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                      1U           NA         NA         NA           0       1.099  
## s               0.325838            NA         NA         NA   2.119e-12           0  
## 
## $`33`
## nlmrt class object: x 
## residual sumsquares =  0.043276  on  6 observations
##     after  15    Jacobian and  16 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0824112            NA         NA         NA  -2.738e-13       3.362  
## s                      1U           NA         NA         NA           0           0  
## 
## $`34`
## nlmrt class object: x 
## residual sumsquares =  0.0020021  on  6 observations
##     after  18    Jacobian and  24 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0350244       0.01148      3.051    0.03799   1.841e-12       5.477  
## s               0.314595       0.06732      4.673   0.009498  -1.831e-12      0.3282  
## 
## $`35`
## nlmrt class object: x 
## residual sumsquares =  0.83694  on  6 observations
##     after  26    Jacobian and  33 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                      1U           NA         NA         NA           0      0.9477  
## s               0.384105            NA         NA         NA  -4.373e-13           0  
## 
## $`37`
## nlmrt class object: x 
## residual sumsquares =  0.3329  on  6 observations
##     after  14    Jacobian and  15 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                      1U           NA         NA         NA           0       1.512  
## s               0.188936            NA         NA         NA  -5.336e-15           0  
## 
## $`38`
## nlmrt class object: x 
## residual sumsquares =  0.092882  on  6 observations
##     after  15    Jacobian and  20 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.269424        0.1297      2.077     0.1064  -3.156e-13        2.19  
## s               0.390098        0.1395      2.796    0.04899   8.064e-13      0.8592  
## 
## $`39`
## nlmrt class object: x 
## residual sumsquares =  0.23317  on  6 observations
##     after  25    Jacobian and  31 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                      1U           NA         NA         NA           0       1.003  
## s               0.361775            NA         NA         NA  -1.683e-13           0  
## 
## $`41`
## nlmrt class object: x 
## residual sumsquares =  0.12163  on  6 observations
##     after  20    Jacobian and  21 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                      1U           NA         NA         NA           0       0.761  
## s               0.474962            NA         NA         NA  -2.801e-13           0  
## 
## $`42`
## nlmrt class object: x 
## residual sumsquares =  0.14794  on  6 observations
##     after  33    Jacobian and  34 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.153972        0.1391      1.107     0.3304  -8.798e-13        2.54  
## s               0.658103         0.281      2.342    0.07918  -4.553e-13      0.6322  
## 
## $`43`
## nlmrt class object: x 
## residual sumsquares =  0.13771  on  6 observations
##     after  16    Jacobian and  22 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.676193        0.2937      2.302    0.08274  -2.601e-12      0.7068  
## s               0.628661        0.2929      2.146    0.09838   1.115e-11      0.5777  
## 
## $`45`
## nlmrt class object: x 
## residual sumsquares =  0.31875  on  6 observations
##     after  17    Jacobian and  18 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                      1U           NA         NA         NA           0      0.4973  
## s               0.707461            NA         NA         NA  -6.337e-14           0  
## 
## $`46`
## nlmrt class object: x 
## residual sumsquares =  0.0075843  on  6 observations
##     after  10    Jacobian and  11 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.100203            NA         NA         NA  -4.136e-13        2.83  
## s                      1U           NA         NA         NA           0           0  
## 
## $`47`
## nlmrt class object: x 
## residual sumsquares =  0.052447  on  6 observations
##     after  13    Jacobian and  14 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0119207            NA         NA         NA  -6.612e-13       25.49  
## s                      1U           NA         NA         NA           0           0  
## 
## $`48`
## nlmrt class object: x 
## residual sumsquares =  0.018877  on  6 observations
##     after  14    Jacobian and  15 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.453288       0.09555      4.744   0.009011   -5.92e-14      0.7821  
## s               0.974663        0.1704       5.72   0.004622  -1.323e-13      0.3937  
## 
## $`49`
## nlmrt class object: x 
## residual sumsquares =  0.013958  on  6 observations
##     after  12    Jacobian and  13 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0938871            NA         NA         NA  -5.456e-14       2.996  
## s                      1U           NA         NA         NA           0           0  
## 
## $`50`
## nlmrt class object: x 
## residual sumsquares =  0.053247  on  6 observations
##     after  10    Jacobian and  11 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.558303            NA         NA         NA  -5.563e-14       0.635  
## s                      1U           NA         NA         NA           0           0  
## 
## $`51`
## nlmrt class object: x 
## residual sumsquares =  0.17349  on  6 observations
##     after  8    Jacobian and  9 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k             0.00267779            NA         NA         NA   3.562e-13       102.6  
## s                      1U           NA         NA         NA           0           0  
## 
## $`52`
## nlmrt class object: x 
## residual sumsquares =  0.57006  on  6 observations
##     after  20    Jacobian and  26 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                      1U           NA         NA         NA           0      0.7489  
## s               0.482024            NA         NA         NA  -9.252e-14           0  
## 
## $`53`
## nlmrt class object: x 
## residual sumsquares =  0.017573  on  6 observations
##     after  8    Jacobian and  9 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k             0.00269219            NA         NA         NA  -1.736e-12       102.2  
## s                      1U           NA         NA         NA           0           0  
## 
## $`54`
## nlmrt class object: x 
## residual sumsquares =  0.01503  on  6 observations
##     after  12    Jacobian and  13 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.247033       0.05861      4.215    0.01353  -7.636e-14       1.484  
## s               0.798589        0.1171      6.817    0.00242  -8.554e-14      0.4932  
## 
## $`55`
## nlmrt class object: x 
## residual sumsquares =  0.0029666  on  6 observations
##     after  7    Jacobian and  8 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k             0.00184613            NA         NA         NA  -5.138e-13       133.6  
## s                      1U           NA         NA         NA           0           0  
## 
## $`56`
## nlmrt class object: x 
## residual sumsquares =  0.013142  on  6 observations
##     after  9    Jacobian and  10 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k             0.00975944            NA         NA         NA  -1.539e-12       31.27  
## s                      1U           NA         NA         NA           0           0  
## 
## $`57`
## nlmrt class object: x 
## residual sumsquares =  7.2188e-05  on  6 observations
##     after  16    Jacobian and  23 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0184609      0.001537      12.01  0.0002752  -6.282e-11       13.64  
## s               0.551215       0.01595      34.56  4.181e-06   7.026e-12      0.2652  
## 
## $`58`
## nlmrt class object: x 
## residual sumsquares =  0.0084748  on  6 observations
##     after  12    Jacobian and  17 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                 0.0884       0.02618      3.377    0.02786   9.374e-11       4.282  
## s               0.644254       0.07361      8.752  0.0009393  -5.529e-11      0.5948  
## 
## $`59`
## nlmrt class object: x 
## residual sumsquares =  0.039916  on  6 observations
##     after  16    Jacobian and  17 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0345404            NA         NA         NA  -5.521e-13       7.995  
## s                      1U           NA         NA         NA           0           0  
## 
## $`60`
## nlmrt class object: x 
## residual sumsquares =  0.00087917  on  6 observations
##     after  11    Jacobian and  15 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0054525      0.001846      2.953    0.04183    9.88e-11          54  
## s               0.916064       0.06396      14.32  0.0001381  -4.202e-12      0.2317  
## 
## $`61`
## nlmrt class object: x 
## residual sumsquares =  0.013575  on  6 observations
##     after  7    Jacobian and  8 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k             0.00154076            NA         NA         NA  -1.156e-12       149.2  
## s                      1U           NA         NA         NA           0           0  
## 
## $`62`
## nlmrt class object: x 
## residual sumsquares =  0.038852  on  6 observations
##     after  14    Jacobian and  15 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0779867            NA         NA         NA  -1.351e-13       3.533  
## s                      1U           NA         NA         NA           0           0  
## 
## $`63`
## nlmrt class object: x 
## residual sumsquares =  0.025138  on  6 observations
##     after  12    Jacobian and  18 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.619219        0.1133      5.466    0.00545  -1.823e-11      0.9224  
## s               0.525644       0.09926      5.296   0.006106   1.881e-11      0.6409  
## 
## $`64`
## nlmrt class object: x 
## residual sumsquares =  0.010951  on  6 observations
##     after  9    Jacobian and  10 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                0.13733            NA         NA         NA  -3.172e-13       2.149  
## s                      1U           NA         NA         NA           0           0  
## 
## $`65`
## nlmrt class object: x 
## residual sumsquares =  0.05325  on  6 observations
##     after  13    Jacobian and  14 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0063602            NA         NA         NA  -4.752e-13       47.74  
## s                      1U           NA         NA         NA           0           0  
## 
## $`66`
## nlmrt class object: x 
## residual sumsquares =  0.31547  on  6 observations
##     after  17    Jacobian and  18 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                      1U           NA         NA         NA           0       1.217  
## s                0.28509            NA         NA         NA  -7.622e-14           0  
## 
## $`67`
## nlmrt class object: x 
## residual sumsquares =  0.0079676  on  6 observations
##     after  13    Jacobian and  14 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.549308        0.0609       9.02  0.0008368  -4.342e-14      0.8637  
## s               0.623128       0.06629      9.401  0.0007136  -9.517e-14      0.6055  
## 
## $`68`
## nlmrt class object: x 
## residual sumsquares =  0.0053836  on  6 observations
##     after  19    Jacobian and  25 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k              0.0490281        0.0187      2.622    0.05868  -9.255e-13       6.112  
## s               0.402259       0.07806      5.153   0.006729   1.788e-13      0.4583  
## 
## $`69`
## nlmrt class object: x 
## residual sumsquares =  0.33145  on  6 observations
##     after  16    Jacobian and  22 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k               0.893914        0.4999      1.788     0.1483    1.01e-13       1.746  
## s               0.136734        0.1939     0.7051     0.5196  -2.115e-12      0.5642  
## 
## $`70`
## nlmrt class object: x 
## residual sumsquares =  1.0438  on  6 observations
##     after  26    Jacobian and  27 function evaluations
##   name            coeff          SE       tstat      pval      gradient    JSingval   
## k                      1U           NA         NA         NA           0       1.055  
## s               0.341757            NA         NA         NA  -4.668e-13           0  
## 
## attr(,"split_type")
## [1] "data.frame"
## attr(,"split_labels")
##    Participant
## 1            1
## 2            2
## 3            3
## 4            4
## 5            5
## 6            6
## 7            7
## 8            8
## 9            9
## 10          10
## 11          11
## 12          12
## 13          13
## 14          14
## 15          15
## 16          16
## 17          17
## 18          18
## 19          20
## 20          21
## 21          22
## 22          23
## 23          24
## 24          25
## 25          26
## 26          27
## 27          28
## 28          30
## 29          31
## 30          32
## 31          33
## 32          34
## 33          35
## 34          37
## 35          38
## 36          39
## 37          41
## 38          42
## 39          43
## 40          45
## 41          46
## 42          47
## 43          48
## 44          49
## 45          50
## 46          51
## 47          52
## 48          53
## 49          54
## 50          55
## 51          56
## 52          57
## 53          58
## 54          59
## 55          60
## 56          61
## 57          62
## 58          63
## 59          64
## 60          65
## 61          66
## 62          67
## 63          68
## 64          69
## 65          70
#Extract each participant's fitted Rachlin coefficients (k and s) into a single data frame
coef_Rach <- ldply(demo_Rach, function(fit) coef(fit))


#Ensure the extracted coefficients are stored as numeric values
coef_Rach$k <- as.numeric(coef_Rach$k)
coef_Rach$s <- as.numeric(coef_Rach$s)
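#Optional check (a sketch, not part of the original analysis): count how many individual fits returned s fixed at 1 (the rows reported as 1U in the output above) versus fits with a freely estimated s, and summarize the fitted parameters.
table(coef_Rach$s == 1)
summary(coef_Rach$k)
summary(coef_Rach$s)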


#Order the data-set by participant number so rows line up with the coefficients from the individual fits
osub <- order(as.integer(discounting$Participant))
discounting <- discounting[osub,]


#Variables to keep for the individual-fit analysis
discounting_vars <- c("Participant", "Smoker", "Money_1", "Money_2", "Money_3", "Money_4", "Money_5", "Money_6")


#Subset the data-set and add each participant's fitted k and s values
discounting_clean <- discounting[discounting_vars]

discounting_clean$k <- coef_Rach$k

discounting_clean$s <- coef_Rach$s




#Reshape the data-set to long format (one row per participant per delay)
discounting_long_Rach <- reshape(discounting_clean, idvar = "Participant", varying = c("Money_1", "Money_2", "Money_3", "Money_4", "Money_5", "Money_6"), timevar = "time", direction = "long", v.names = "indiff")



#Order Data
osub <- order(as.integer(discounting_long_Rach$Participant))
discounting_long_Rach <- discounting_long_Rach[osub, ]

#Replace the time codes 1-6 with the actual delays in months (the six values recycle in order down the participant-ordered data-set)
discounting_long_Rach$time <- c("1" = ".25", "2" = ".5", "3" = "1", "4" = "6", "5" = "60", "6" = "300")

discounting_long_Rach$time <- as.numeric(discounting_long_Rach$time)
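#Quick sanity check (a sketch, not in the original script): after the recoding, each of the six delays should appear once per participant (65 times in total).
table(discounting_long_Rach$time)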



#Predicted value from the Rachlin model, V = A/(1+kD^s), with A = 1
discounting_long_Rach$predict <- 1/(1+discounting_long_Rach$k*(discounting_long_Rach$time^discounting_long_Rach$s))

#Residual = observed indifference point minus predicted value
discounting_long_Rach$resid <- discounting_long_Rach$indiff - discounting_long_Rach$predict
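#Optional cross-check (a sketch, not in the original script): the residual sum of squares recomputed from these predictions should match the residual sumsquares reported by wrapnls for each participant above.
head(tapply(discounting_long_Rach$resid^2, discounting_long_Rach$Participant, sum))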


discounting_wide_Rach <- reshape(discounting_long_Rach, idvar = c("Participant", "Smoker"), timevar = "time", direction = "wide") #Change data-set into wide format


#Mean Indifference Point
discounting_wide_Rach$mean_indiff <- (discounting_wide_Rach$indiff.0.25 + discounting_wide_Rach$indiff.0.5 + discounting_wide_Rach$indiff.1 + discounting_wide_Rach$indiff.6 + discounting_wide_Rach$indiff.60 + discounting_wide_Rach$indiff.300)/6
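#Equivalent calculation via rowMeans (a sketch, not in the original script; assumes the six indiff.* columns created by reshape() above):
mean_indiff_check <- rowMeans(discounting_wide_Rach[, paste0("indiff.", c(0.25, 0.5, 1, 6, 60, 300))])
all.equal(unname(mean_indiff_check), discounting_wide_Rach$mean_indiff) #should return TRUE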


#Total sum of squares
discounting_wide_Rach$TSS <- ((discounting_wide_Rach$indiff.0.25 - discounting_wide_Rach$mean_indiff)^2 + (discounting_wide_Rach$indiff.0.5 - discounting_wide_Rach$mean_indiff)^2 + (discounting_wide_Rach$indiff.1 - discounting_wide_Rach$mean_indiff)^2 + (discounting_wide_Rach$indiff.6 - discounting_wide_Rach$mean_indiff)^2 + (discounting_wide_Rach$indiff.60 - discounting_wide_Rach$mean_indiff)^2 + (discounting_wide_Rach$indiff.300 - discounting_wide_Rach$mean_indiff)^2)

#Residual sum of squares
discounting_wide_Rach$RSS <- (discounting_wide_Rach$resid.0.25^2 + discounting_wide_Rach$resid.0.5^2 + discounting_wide_Rach$resid.1^2 + discounting_wide_Rach$resid.6^2 + discounting_wide_Rach$resid.60^2 + discounting_wide_Rach$resid.300^2)

discounting_wide_Rach$r_squared <- (1 - (discounting_wide_Rach$RSS/discounting_wide_Rach$TSS))

median(discounting_wide_Rach$r_squared, na.rm = FALSE)
## [1] 0.92656
describeBy(discounting_wide_Rach$r_squared, group = discounting_wide_Rach$Smoker)
## 
##  Descriptive statistics by group 
## group: 0
##    vars  n mean  sd median trimmed  mad  min max range skew kurtosis  se
## X1    1 33 -Inf NaN   0.96    0.96 0.04 -Inf   1   Inf  NaN      NaN NaN
## ------------------------------------------------------------ 
## group: 1
##    vars  n  mean    sd median trimmed  mad     min max  range  skew kurtosis
## X1    1 32 -6.06 23.94   0.74    0.13 0.36 -111.86   1 112.86 -3.57    11.64
##      se
## X1 4.23
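#Note: r_squared is -Inf whenever TSS = 0, i.e., when a participant's indifference points are identical at every delay, which is why the group statistics above are not all finite. A sketch (not in the original script) to flag those cases and summarize only the finite values:
finite_r2 <- is.finite(discounting_wide_Rach$r_squared)
which(!finite_r2) #row positions with undefined r-squared
describeBy(discounting_wide_Rach$r_squared[finite_r2], group = discounting_wide_Rach$Smoker[finite_r2])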
#AIC for model fit
discounting_wide_Rach$AICc <- -2*log(discounting_wide_Rach$RSS) + 2*2 + (2*2*(2+1))/(6-2-1) #2 = number of free parameters (k and s); 6 = number of observations (indifference points) per participant
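#The same AICc written with named constants (a sketch restating the line above, not an alternative formula): p = 2 free parameters (k and s) and n = 6 indifference points per participant.
n_obs <- 6
n_par <- 2
AICc_check <- -2*log(discounting_wide_Rach$RSS) + 2*n_par + (2*n_par*(n_par + 1))/(n_obs - n_par - 1)
all.equal(AICc_check, discounting_wide_Rach$AICc) #should return TRUE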


median(discounting_wide_Rach$AICc, na.rm = FALSE)
## [1] 14.657
describeBy(discounting_wide_Rach$AICc, group = discounting_wide_Rach$Smoker)
## 
##  Descriptive statistics by group 
## group: 0
##    vars  n  mean    sd median trimmed  mad   min    max  range skew kurtosis
## X1    1 33 21.03 20.24  16.54   16.95 3.12 13.87 131.04 117.18 4.86    23.45
##      se
## X1 3.52
## ------------------------------------------------------------ 
## group: 1
##    vars  n  mean   sd median trimmed  mad  min   max range skew kurtosis   se
## X1    1 32 12.86 3.08  11.97   12.71 2.62 7.91 19.51 11.59 0.54     -0.8 0.54
#AUC
time_AUC <- c(.25/300, .5/300, 1/300, 6/300, 60/300, 300/300) #Delays normalized by the maximum delay (300 months); these are the x-values for the AUC calculation


attach(discounting_wide_Rach) #Attach so the indiff.* columns can be referenced directly in the AUC formula below
## The following objects are masked _by_ .GlobalEnv:
## 
##     RSS, TSS
## The following objects are masked from discounting_wide_Maz:
## 
##     AICc, indiff.0.25, indiff.0.5, indiff.1, indiff.300, indiff.6,
##     indiff.60, k.0.25, k.0.5, k.1, k.300, k.6, k.60, mean_indiff,
##     Participant, predict.0.25, predict.0.5, predict.1, predict.300,
##     predict.6, predict.60, r_squared, resid.0.25, resid.0.5, resid.1,
##     resid.300, resid.6, resid.60, RSS, Smoker, TSS
#Area under the curve: sum of trapezoids between successive normalized delays, with the value at delay 0 taken to be 1
discounting_wide_Rach$AUC <- (((0.00083333-0)*((1 + indiff.0.25)/2)) + ((0.00166667-0.00083333)*((indiff.0.25+indiff.0.5)/2)) + ((0.00333333-0.00166667)*((indiff.0.5 + indiff.1)/2)) + ((0.02000000-0.00333333)*((indiff.1 + indiff.6)/2)) + ((0.20000000-0.02000000)*((indiff.6 + indiff.60)/2)) + ((1-0.20000000)*((indiff.60 + indiff.300)/2)))
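#A more general version of the same trapezoidal AUC calculation (a sketch, not part of the original script). It assumes the value at delay 0 is the full normalized amount (1) and uses the normalized delays in time_AUC, so it should reproduce the hard-coded sum above up to rounding of those constants.
auc_trapezoid <- function(indiff, delays_norm) {
  x <- c(0, delays_norm)                        #normalized delays, starting at 0
  y <- c(1, indiff)                             #indifference points, with value 1 at delay 0
  sum(diff(x) * (head(y, -1) + tail(y, -1))/2)  #sum of trapezoid areas
}

indiff_cols <- paste0("indiff.", c(0.25, 0.5, 1, 6, 60, 300))
AUC_check <- apply(discounting_wide_Rach[, indiff_cols], 1, auc_trapezoid, delays_norm = time_AUC)
all.equal(unname(AUC_check), discounting_wide_Rach$AUC, tolerance = 1e-5) #should return TRUE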