This is an example of running an R version of Google Datalab.

Google Datalab is a service that lets you easily interact with your data in Google Cloud. This document is an exercise in replicating the same functionality:

  • Runs on Google Cloud infrastructure, using googleComputeEngineR within its own Docker container (see the launch sketch after this list)
  • Uses RStudio and its RMarkdown Notebooks to replicate the Jupyter/IPython functionality
  • Automatic authentication with Google Cloud services to work with BigQuery and Cloud Storage data
  • Cross-language support for Python, SQL and bash via R Notebooks
  • Python data-analysis libraries pandas and NumPy
  • Visualisation via R libraries such as the htmlwidgets family
  • Installation of TensorFlow and RStudio's tensorflow package
  • Installation of the TensorFlow helper library tflearn
  • Installation of feather to help R and Python share data nicely
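
To give an idea of the first bullet, here is a minimal sketch of launching such an instance with googleComputeEngineR. The VM name, username and password below are illustrative placeholders, not the exact setup used for this document:

library(googleComputeEngineR)

## launch an RStudio server VM from a pre-built Docker template
## (name, username and password are placeholder values)
vm <- gce_vm(template = "rstudio",
             name     = "r-datalab",
             username = "rstudio",
             password = "changeme",
             predefined_type = "n1-standard-1")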

Setup

library(googleAuthR)
## this reuses the authentication of the GCE instance we are on
gar_gce_auth()

library(bigQueryR)
## list authenticated projects
myproject <- bqr_list_projects()

library(googleCloudStorageR)
## Setting scopes to https://www.googleapis.com/auth/devstorage.full_control
## If you need additional scopes set do so via options(googleAuthR.scopes.selected = c('scope1', 'scope2')) before loading library and include one required scope.
## list Cloud Storage buckets
gcs_list_buckets(myproject$id[[1]])
##                                       name storageClass location
## 1 artifacts.mark-edmondson-gde.appspot.com     STANDARD       US
## 2      mark-edmondson-gde-minecraft-backup     STANDARD       US
## 3              mark-edmondson-public-files     STANDARD       EU
##               updated
## 1 2016-10-07 11:37:55
## 2 2015-11-10 09:28:38
## 3 2016-08-27 20:47:23
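
With authentication in place you can query BigQuery directly. A minimal sketch, using the public Shakespeare sample table purely as an illustration (legacy SQL, the bigQueryR default at the time):

## query the public Shakespeare sample table
shakespeare <- bqr_query(projectId = myproject$id[[1]],
                         datasetId = "samples",
                         query = "SELECT word, word_count
                                  FROM [publicdata:samples.shakespeare]
                                  LIMIT 10")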

Demo of running Python in the same document:

hiss = 'sssssssss'
print("Pythons go %s." % hiss)
## Pythons go sssssssss.

It also works with SQL and bash; for example, listing the installed Python libraries from a bash chunk:

pip freeze
## Cython==0.25.1
## Pillow==3.4.2
## argparse==1.2.1
## cffi==0.8.6
## chardet==2.3.0
## colorama==0.3.2
## cryptography==0.6.1
## feather-format==0.3.1
## funcsigs==1.0.2
## h5py==2.6.0
## html5lib==0.999
## mock==2.0.0
## ndg-httpsclient==0.3.2
## numpy==1.11.2
## pandas==0.19.1
## pbr==1.10.0
## ply==3.4
## protobuf==3.0.0
## pyOpenSSL==0.14
## pyasn1==0.1.7
## pycparser==2.10
## python-dateutil==2.6.0
## pytz==2016.7
## requests==2.4.3
## six==1.10.0
## tensorflow==0.11.0
## tflearn==0.2.2
## urllib3==1.9.1
## wheel==0.29.0
## wsgiref==0.1.2
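
For reference, the cross-language support comes from knitr's language engines: each chunk header in the R Notebook source names the engine it runs under. A schematic example (not taken from this document's source):

    ```{python}
    print("run by the Python engine")
    ```

    ```{bash}
    echo "run by the bash engine"
    ```

SQL chunks work the same way, given a database connection in the chunk options.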

Transfer data between R and Python with feather

From the example in the introductory blog post for feather:

library(feather)
df <- mtcars
path <- "my_data.feather"
write_feather(df, path)
import feather
path = 'my_data.feather'
df = feather.read_dataframe(path)
df.head()
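
The transfer works in the other direction too. A minimal sketch (the file name here is an arbitrary choice): write the data frame out from Python with feather.write_dataframe, then read it back in R:

feather.write_dataframe(df, 'from_python.feather')

library(feather)
df2 <- read_feather('from_python.feather')
head(df2)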

TensorFlow

Hello world Python

from __future__ import print_function

import tensorflow as tf

# Simple hello world using TensorFlow

# Create a Constant op
# The op is added as a node to the default graph.
#
# The value returned by the constructor represents the output
# of the Constant op.
hello = tf.constant('Hello, TensorFlow!')

# Start tf session
sess = tf.Session()

# Run the op
print(sess.run(hello))
## Hello, TensorFlow!

Hello world R

library(tensorflow)
sess <- tf$Session()
hello <- tf$constant('Hello, TensorFlow!')
sess$run(hello)
## [1] "Hello, TensorFlow!"

tflearn Titanic example

from __future__ import print_function

import numpy as np
import tflearn

# Download the Titanic dataset
from tflearn.datasets import titanic
titanic.download_dataset('titanic_dataset.csv')

# Load CSV file, indicate that the first column represents labels
from tflearn.data_utils import load_csv
data, labels = load_csv('titanic_dataset.csv', target_column=0,
                        categorical_labels=True, n_classes=2)

# Preprocessing function
def preprocess(data, columns_to_ignore):
    # Sort by descending id and delete columns
    for id in sorted(columns_to_ignore, reverse=True):
        [r.pop(id) for r in data]
    for i in range(len(data)):
        # Converting 'sex' field to float (id is 1 after removing labels column)
        data[i][1] = 1. if data[i][1] == 'female' else 0.
    return np.array(data, dtype=np.float32)

# Ignore 'name' and 'ticket' columns (id 1 & 6 of data array)
to_ignore = [1, 6]

# Preprocess data
data = preprocess(data, to_ignore)

# Build neural network
net = tflearn.input_data(shape=[None, 6])
net = tflearn.fully_connected(net, 32)
net = tflearn.fully_connected(net, 32)
net = tflearn.fully_connected(net, 2, activation='softmax')
net = tflearn.regression(net)

# Define model
model = tflearn.DNN(net)
# Start training (apply gradient descent algorithm)
model.fit(data, labels, n_epoch=10, batch_size=16, show_metric=True)

# Let's create some data for DiCaprio and Winslet
dicaprio = [3, 'Jack Dawson', 'male', 19, 0, 0, 'N/A', 5.0000]
winslet = [1, 'Rose DeWitt Bukater', 'female', 17, 1, 2, 'N/A', 100.0000]
# Preprocess data
dicaprio, winslet = preprocess([dicaprio, winslet], to_ignore)
# Predict surviving chances (class 1 results)
pred = model.predict([dicaprio, winslet])
print("DiCaprio Surviving Rate:", pred[0][1])
print("Winslet Surviving Rate:", pred[1][1])

tflearn using R as well

From the tflearn quickstart, modified to use R for the data preprocessing:

import tflearn

# Download the Titanic dataset to local file 'titanic_dataset.csv'
from tflearn.datasets import titanic
titanic.download_dataset('titanic_dataset.csv')
## Scipy not supported!

Use R to process data:

library(dplyr)

titanic <- read.csv('titanic_dataset.csv')

processed <- titanic %>%
  select(-name, -ticket) %>%
  mutate(sex = as.numeric(as.factor(sex)) - 1) # female = 0, male = 1 (alphabetical factor levels; note this is the reverse of the Python encoding above)
str(processed)
## 'data.frame':    1309 obs. of  7 variables:
##  $ survived: int  1 1 0 0 0 1 1 0 1 0 ...
##  $ pclass  : int  1 1 1 1 1 1 1 1 1 1 ...
##  $ sex     : num  0 1 0 1 0 1 0 1 0 1 ...
##  $ age     : num  29 0.917 2 30 25 ...
##  $ sibsp   : int  0 1 1 1 1 0 1 0 2 0 ...
##  $ parch   : int  0 2 2 2 2 0 0 0 0 0 ...
##  $ fare    : num  211 152 152 152 152 ...
write.table(processed, "processed.csv", sep = ",", quote = FALSE, row.names = FALSE)

Back to Python to run model:

from __future__ import print_function

import numpy as np
import tflearn

# Load processed CSV file, indicate that the first column represents labels
from tflearn.data_utils import load_csv
data, labels = load_csv('processed.csv', target_column=0,
                        categorical_labels=True, n_classes=2)

data = np.array(data, dtype=np.float32)

# Build neural network
net = tflearn.input_data(shape=[None, 6])
net = tflearn.fully_connected(net, 32)
net = tflearn.fully_connected(net, 32)
net = tflearn.fully_connected(net, 2, activation='softmax')
net = tflearn.regression(net)

# Define model
model = tflearn.DNN(net)
# Start training (apply gradient descent algorithm)
model.fit(data, labels, n_epoch=10, batch_size=16)

# Let's create some data for DiCaprio and Winslet
dicaprio = [3, 1, 19, 0, 0, 5.0000]   # pclass, sex (1 = male), age, sibsp, parch, fare
winslet = [1, 0, 17, 1, 2, 100.0000]  # sex = 0 is female under the R encoding above

# Predict surviving chances (class 1 results)
pred = model.predict([dicaprio, winslet])
print("DiCaprio Surviving Rate:", pred[0][1])
print("Winslet Surviving Rate:", pred[1][1])
## Scipy not supported!
## ---------------------------------
## Run id: U527AR
## Log directory: /tmp/tflearn_logs/
## ---------------------------------
## Training samples: 1309
## Validation samples: 0
## --
## Training Step: 82  | total loss: 0.63598 | Adam | epoch: 001 | loss: 0.63598 -- iter: 1309/1309
## Training Step: 164 | total loss: 0.64570 | Adam | epoch: 002 | loss: 0.64570 -- iter: 1309/1309
## Training Step: 246 | total loss: 0.60208 | Adam | epoch: 003 | loss: 0.60208 -- iter: 1309/1309
## Training Step: 328 | total loss: 0.57511 | Adam | epoch: 004 | loss: 0.57511 -- iter: 1309/1309
## Training Step: 410 | total loss: 0.55120 | Adam | epoch: 005 | loss: 0.55120 -- iter: 1309/1309
## Training Step: 492 | total loss: 0.48086 | Adam | epoch: 006 | loss: 0.48086 -- iter: 1309/1309
## [per-step carriage-return progress lines trimmed to the end-of-epoch lines; the captured log breaks off mid-epoch 007 at Training Step: 506 | total loss: 0.51502]
## 
| Adam | epoch: 007 | loss: 0.51502 -- iter: 0224/1309
## Training Step: 507  | total loss: 0.50748
## 
| Adam | epoch: 007 | loss: 0.50748 -- iter: 0240/1309
## Training Step: 508  | total loss: 0.51850
## 
| Adam | epoch: 007 | loss: 0.51850 -- iter: 0256/1309
## Training Step: 509  | total loss: 0.51918
## 
| Adam | epoch: 007 | loss: 0.51918 -- iter: 0272/1309
## Training Step: 510  | total loss: 0.52977
## 
| Adam | epoch: 007 | loss: 0.52977 -- iter: 0288/1309
## Training Step: 511  | total loss: 0.54017
## 
| Adam | epoch: 007 | loss: 0.54017 -- iter: 0304/1309
## Training Step: 512  | total loss: 0.52931
## 
| Adam | epoch: 007 | loss: 0.52931 -- iter: 0320/1309
## Training Step: 513  | total loss: 0.55541
## 
| Adam | epoch: 007 | loss: 0.55541 -- iter: 0336/1309
## Training Step: 514  | total loss: 0.55370
## 
| Adam | epoch: 007 | loss: 0.55370 -- iter: 0352/1309
## Training Step: 515  | total loss: 0.56110
## 
| Adam | epoch: 007 | loss: 0.56110 -- iter: 0368/1309
## Training Step: 516  | total loss: 0.55934
## 
| Adam | epoch: 007 | loss: 0.55934 -- iter: 0384/1309
## Training Step: 517  | total loss: 0.55248
## 
| Adam | epoch: 007 | loss: 0.55248 -- iter: 0400/1309
## Training Step: 518  | total loss: 0.55707
## 
| Adam | epoch: 007 | loss: 0.55707 -- iter: 0416/1309
## Training Step: 519  | total loss: 0.56100
## 
| Adam | epoch: 007 | loss: 0.56100 -- iter: 0432/1309
## Training Step: 520  | total loss: 0.54835
## 
| Adam | epoch: 007 | loss: 0.54835 -- iter: 0448/1309
## Training Step: 521  | total loss: 0.53571
## 
| Adam | epoch: 007 | loss: 0.53571 -- iter: 0464/1309
## Training Step: 522  | total loss: 0.54911
## 
| Adam | epoch: 007 | loss: 0.54911 -- iter: 0480/1309
## Training Step: 523  | total loss: 0.54980
## 
| Adam | epoch: 007 | loss: 0.54980 -- iter: 0496/1309
## Training Step: 524  | total loss: 0.55034
## 
| Adam | epoch: 007 | loss: 0.55034 -- iter: 0512/1309
## Training Step: 525  | total loss: 0.55191
## 
| Adam | epoch: 007 | loss: 0.55191 -- iter: 0528/1309
## Training Step: 526  | total loss: 0.56211
## 
| Adam | epoch: 007 | loss: 0.56211 -- iter: 0544/1309
## Training Step: 527  | total loss: 0.56175
## 
| Adam | epoch: 007 | loss: 0.56175 -- iter: 0560/1309
## Training Step: 528  | total loss: 0.54596
## 
| Adam | epoch: 007 | loss: 0.54596 -- iter: 0576/1309
## Training Step: 529  | total loss: 0.53432
## 
| Adam | epoch: 007 | loss: 0.53432 -- iter: 0592/1309
## Training Step: 530  | total loss: 0.53672
## 
| Adam | epoch: 007 | loss: 0.53672 -- iter: 0608/1309
## Training Step: 531  | total loss: 0.53149
## 
| Adam | epoch: 007 | loss: 0.53149 -- iter: 0624/1309
## Training Step: 532  | total loss: 0.51577
## 
| Adam | epoch: 007 | loss: 0.51577 -- iter: 0640/1309
## Training Step: 533  | total loss: 0.49904
## 
| Adam | epoch: 007 | loss: 0.49904 -- iter: 0656/1309
## Training Step: 534  | total loss: 0.49564
## 
| Adam | epoch: 007 | loss: 0.49564 -- iter: 0672/1309
## Training Step: 535  | total loss: 0.49349
## 
| Adam | epoch: 007 | loss: 0.49349 -- iter: 0688/1309
## Training Step: 536  | total loss: 0.52466
## 
| Adam | epoch: 007 | loss: 0.52466 -- iter: 0704/1309
## Training Step: 537  | total loss: 0.52123
## 
| Adam | epoch: 007 | loss: 0.52123 -- iter: 0720/1309
## Training Step: 538  | total loss: 0.51911
## 
| Adam | epoch: 007 | loss: 0.51911 -- iter: 0736/1309
## Training Step: 539  | total loss: 0.50762
## 
| Adam | epoch: 007 | loss: 0.50762 -- iter: 0752/1309
## Training Step: 540  | total loss: 0.52476
## 
| Adam | epoch: 007 | loss: 0.52476 -- iter: 0768/1309
## Training Step: 541  | total loss: 0.54955
## 
| Adam | epoch: 007 | loss: 0.54955 -- iter: 0784/1309
## Training Step: 542  | total loss: 0.53428
## 
| Adam | epoch: 007 | loss: 0.53428 -- iter: 0800/1309
## Training Step: 543  | total loss: 0.51214
## 
| Adam | epoch: 007 | loss: 0.51214 -- iter: 0816/1309
## Training Step: 544  | total loss: 0.49665
## 
| Adam | epoch: 007 | loss: 0.49665 -- iter: 0832/1309
## Training Step: 545  | total loss: 0.48487
## 
| Adam | epoch: 007 | loss: 0.48487 -- iter: 0848/1309
## Training Step: 546  | total loss: 0.52839
## 
| Adam | epoch: 007 | loss: 0.52839 -- iter: 0864/1309
## Training Step: 547  | total loss: 0.53182
## 
| Adam | epoch: 007 | loss: 0.53182 -- iter: 0880/1309
## Training Step: 548  | total loss: 0.52458
## 
| Adam | epoch: 007 | loss: 0.52458 -- iter: 0896/1309
## Training Step: 549  | total loss: 0.52640
## 
| Adam | epoch: 007 | loss: 0.52640 -- iter: 0912/1309
## Training Step: 550  | total loss: 0.50974
## 
| Adam | epoch: 007 | loss: 0.50974 -- iter: 0928/1309
## Training Step: 551  | total loss: 0.50546
## 
| Adam | epoch: 007 | loss: 0.50546 -- iter: 0944/1309
## Training Step: 552  | total loss: 0.50031
## 
| Adam | epoch: 007 | loss: 0.50031 -- iter: 0960/1309
## Training Step: 553  | total loss: 0.50430
## 
| Adam | epoch: 007 | loss: 0.50430 -- iter: 0976/1309
## Training Step: 554  | total loss: 0.49321
## 
| Adam | epoch: 007 | loss: 0.49321 -- iter: 0992/1309
## Training Step: 555  | total loss: 0.51029
## 
| Adam | epoch: 007 | loss: 0.51029 -- iter: 1008/1309
## Training Step: 556  | total loss: 0.51622
## 
| Adam | epoch: 007 | loss: 0.51622 -- iter: 1024/1309
## Training Step: 557  | total loss: 0.49646
## 
| Adam | epoch: 007 | loss: 0.49646 -- iter: 1040/1309
## Training Step: 558  | total loss: 0.50122
## 
| Adam | epoch: 007 | loss: 0.50122 -- iter: 1056/1309
## Training Step: 559  | total loss: 0.50512
## 
| Adam | epoch: 007 | loss: 0.50512 -- iter: 1072/1309
## Training Step: 560  | total loss: 0.51182
## 
| Adam | epoch: 007 | loss: 0.51182 -- iter: 1088/1309
## Training Step: 561  | total loss: 0.51462
## 
| Adam | epoch: 007 | loss: 0.51462 -- iter: 1104/1309
## Training Step: 562  | total loss: 0.53172
## 
| Adam | epoch: 007 | loss: 0.53172 -- iter: 1120/1309
## Training Step: 563  | total loss: 0.54427
## 
| Adam | epoch: 007 | loss: 0.54427 -- iter: 1136/1309
## Training Step: 564  | total loss: 0.53862
## 
| Adam | epoch: 007 | loss: 0.53862 -- iter: 1152/1309
## Training Step: 565  | total loss: 0.52567
## 
| Adam | epoch: 007 | loss: 0.52567 -- iter: 1168/1309
## Training Step: 566  | total loss: 0.51571
## 
| Adam | epoch: 007 | loss: 0.51571 -- iter: 1184/1309
## Training Step: 567  | total loss: 0.49881
## 
| Adam | epoch: 007 | loss: 0.49881 -- iter: 1200/1309
## Training Step: 568  | total loss: 0.48347
## 
| Adam | epoch: 007 | loss: 0.48347 -- iter: 1216/1309
## Training Step: 569  | total loss: 0.50948
## 
| Adam | epoch: 007 | loss: 0.50948 -- iter: 1232/1309
## Training Step: 570  | total loss: 0.51835
## 
| Adam | epoch: 007 | loss: 0.51835 -- iter: 1248/1309
## Training Step: 571  | total loss: 0.55805
## 
| Adam | epoch: 007 | loss: 0.55805 -- iter: 1264/1309
## Training Step: 572  | total loss: 0.53673
## 
| Adam | epoch: 007 | loss: 0.53673 -- iter: 1280/1309
## Training Step: 573  | total loss: 0.52719
## 
| Adam | epoch: 007 | loss: 0.52719 -- iter: 1296/1309
## Training Step: 574  | total loss: 0.51548
## 
| Adam | epoch: 007 | loss: 0.51548 -- iter: 1309/1309
## Training Step: 574  | total loss: 0.51548
## 
| Adam | epoch: 007 | loss: 0.51548 -- iter: 1309/1309
## --
## Training Step: 575  | total loss: 0.51047
## 
| Adam | epoch: 008 | loss: 0.51047 -- iter: 0016/1309
## Training Step: 576  | total loss: 0.49824
## 
| Adam | epoch: 008 | loss: 0.49824 -- iter: 0032/1309
## Training Step: 577  | total loss: 0.48585
## 
| Adam | epoch: 008 | loss: 0.48585 -- iter: 0048/1309
## Training Step: 578  | total loss: 0.47093
## 
| Adam | epoch: 008 | loss: 0.47093 -- iter: 0064/1309
## Training Step: 579  | total loss: 0.47396
## 
| Adam | epoch: 008 | loss: 0.47396 -- iter: 0080/1309
## Training Step: 580  | total loss: 0.50810
## 
| Adam | epoch: 008 | loss: 0.50810 -- iter: 0096/1309
## Training Step: 581  | total loss: 0.52301
## 
| Adam | epoch: 008 | loss: 0.52301 -- iter: 0112/1309
## Training Step: 582  | total loss: 0.53618
## 
| Adam | epoch: 008 | loss: 0.53618 -- iter: 0128/1309
## Training Step: 583  | total loss: 0.53567
## 
| Adam | epoch: 008 | loss: 0.53567 -- iter: 0144/1309
## Training Step: 584  | total loss: 0.57043
## 
| Adam | epoch: 008 | loss: 0.57043 -- iter: 0160/1309
## Training Step: 585  | total loss: 0.53909
## 
| Adam | epoch: 008 | loss: 0.53909 -- iter: 0176/1309
## Training Step: 586  | total loss: 0.52573
## 
| Adam | epoch: 008 | loss: 0.52573 -- iter: 0192/1309
## Training Step: 587  | total loss: 0.52242
## 
| Adam | epoch: 008 | loss: 0.52242 -- iter: 0208/1309
## Training Step: 588  | total loss: 0.53885
## 
| Adam | epoch: 008 | loss: 0.53885 -- iter: 0224/1309
## Training Step: 589  | total loss: 0.53656
## 
| Adam | epoch: 008 | loss: 0.53656 -- iter: 0240/1309
## Training Step: 590  | total loss: 0.53133
## 
| Adam | epoch: 008 | loss: 0.53133 -- iter: 0256/1309
## Training Step: 591  | total loss: 0.55099
## 
| Adam | epoch: 008 | loss: 0.55099 -- iter: 0272/1309
## Training Step: 592  | total loss: 0.53345
## 
| Adam | epoch: 008 | loss: 0.53345 -- iter: 0288/1309
## Training Step: 593  | total loss: 0.53150
## 
| Adam | epoch: 008 | loss: 0.53150 -- iter: 0304/1309
## Training Step: 594  | total loss: 0.52391
## 
| Adam | epoch: 008 | loss: 0.52391 -- iter: 0320/1309
## Training Step: 595  | total loss: 0.52205
## 
| Adam | epoch: 008 | loss: 0.52205 -- iter: 0336/1309
## Training Step: 596  | total loss: 0.54782
## 
| Adam | epoch: 008 | loss: 0.54782 -- iter: 0352/1309
## Training Step: 597  | total loss: 0.55612
## 
| Adam | epoch: 008 | loss: 0.55612 -- iter: 0368/1309
## Training Step: 598  | total loss: 0.54268
## 
| Adam | epoch: 008 | loss: 0.54268 -- iter: 0384/1309
## Training Step: 599  | total loss: 0.52596
## 
| Adam | epoch: 008 | loss: 0.52596 -- iter: 0400/1309
## Training Step: 600  | total loss: 0.50451
## 
| Adam | epoch: 008 | loss: 0.50451 -- iter: 0416/1309
## Training Step: 601  | total loss: 0.49544
## 
| Adam | epoch: 008 | loss: 0.49544 -- iter: 0432/1309
## Training Step: 602  | total loss: 0.48945
## 
| Adam | epoch: 008 | loss: 0.48945 -- iter: 0448/1309
## Training Step: 603  | total loss: 0.50750
## 
| Adam | epoch: 008 | loss: 0.50750 -- iter: 0464/1309
## Training Step: 604  | total loss: 0.50369
## 
| Adam | epoch: 008 | loss: 0.50369 -- iter: 0480/1309
## Training Step: 605  | total loss: 0.48871
## 
| Adam | epoch: 008 | loss: 0.48871 -- iter: 0496/1309
## Training Step: 606  | total loss: 0.51515
## 
| Adam | epoch: 008 | loss: 0.51515 -- iter: 0512/1309
## Training Step: 607  | total loss: 0.51519
## 
| Adam | epoch: 008 | loss: 0.51519 -- iter: 0528/1309
## Training Step: 608  | total loss: 0.49730
## 
| Adam | epoch: 008 | loss: 0.49730 -- iter: 0544/1309
## Training Step: 609  | total loss: 0.48156
## 
| Adam | epoch: 008 | loss: 0.48156 -- iter: 0560/1309
## Training Step: 610  | total loss: 0.51553
## 
| Adam | epoch: 008 | loss: 0.51553 -- iter: 0576/1309
## Training Step: 611  | total loss: 0.52549
## 
| Adam | epoch: 008 | loss: 0.52549 -- iter: 0592/1309
## Training Step: 612  | total loss: 0.52168
## 
| Adam | epoch: 008 | loss: 0.52168 -- iter: 0608/1309
## Training Step: 613  | total loss: 0.52045
## 
| Adam | epoch: 008 | loss: 0.52045 -- iter: 0624/1309
## Training Step: 614  | total loss: 0.51196
## 
| Adam | epoch: 008 | loss: 0.51196 -- iter: 0640/1309
## Training Step: 615  | total loss: 0.51618
## 
| Adam | epoch: 008 | loss: 0.51618 -- iter: 0656/1309
## Training Step: 616  | total loss: 0.51351
## 
| Adam | epoch: 008 | loss: 0.51351 -- iter: 0672/1309
## Training Step: 617  | total loss: 0.52318
## 
| Adam | epoch: 008 | loss: 0.52318 -- iter: 0688/1309
## Training Step: 618  | total loss: 0.54698
## 
| Adam | epoch: 008 | loss: 0.54698 -- iter: 0704/1309
## Training Step: 619  | total loss: 0.53689
## 
| Adam | epoch: 008 | loss: 0.53689 -- iter: 0720/1309
## Training Step: 620  | total loss: 0.52412
## 
| Adam | epoch: 008 | loss: 0.52412 -- iter: 0736/1309
## Training Step: 621  | total loss: 0.51648
## 
| Adam | epoch: 008 | loss: 0.51648 -- iter: 0752/1309
## Training Step: 622  | total loss: 0.49226
## 
| Adam | epoch: 008 | loss: 0.49226 -- iter: 0768/1309
## Training Step: 623  | total loss: 0.51134
## 
| Adam | epoch: 008 | loss: 0.51134 -- iter: 0784/1309
## Training Step: 624  | total loss: 0.50075
## 
| Adam | epoch: 008 | loss: 0.50075 -- iter: 0800/1309
## Training Step: 625  | total loss: 0.51032
## 
| Adam | epoch: 008 | loss: 0.51032 -- iter: 0816/1309
## Training Step: 626  | total loss: 0.51527
## 
| Adam | epoch: 008 | loss: 0.51527 -- iter: 0832/1309
## Training Step: 627  | total loss: 0.52213
## 
| Adam | epoch: 008 | loss: 0.52213 -- iter: 0848/1309
## Training Step: 628  | total loss: 0.51785
## 
| Adam | epoch: 008 | loss: 0.51785 -- iter: 0864/1309
## Training Step: 629  | total loss: 0.51411
## 
| Adam | epoch: 008 | loss: 0.51411 -- iter: 0880/1309
## Training Step: 630  | total loss: 0.50180
## 
| Adam | epoch: 008 | loss: 0.50180 -- iter: 0896/1309
## Training Step: 631  | total loss: 0.52669
## 
| Adam | epoch: 008 | loss: 0.52669 -- iter: 0912/1309
## Training Step: 632  | total loss: 0.51004
## 
| Adam | epoch: 008 | loss: 0.51004 -- iter: 0928/1309
## Training Step: 633  | total loss: 0.50625
## 
| Adam | epoch: 008 | loss: 0.50625 -- iter: 0944/1309
## Training Step: 634  | total loss: 0.49660
## 
| Adam | epoch: 008 | loss: 0.49660 -- iter: 0960/1309
## Training Step: 635  | total loss: 0.52469
## 
| Adam | epoch: 008 | loss: 0.52469 -- iter: 0976/1309
## Training Step: 636  | total loss: 0.52406
## 
| Adam | epoch: 008 | loss: 0.52406 -- iter: 0992/1309
## Training Step: 637  | total loss: 0.52382
## 
| Adam | epoch: 008 | loss: 0.52382 -- iter: 1008/1309
## Training Step: 638  | total loss: 0.50497
## 
| Adam | epoch: 008 | loss: 0.50497 -- iter: 1024/1309
## Training Step: 639  | total loss: 0.48558
## 
| Adam | epoch: 008 | loss: 0.48558 -- iter: 1040/1309
## Training Step: 640  | total loss: 0.49089
## 
| Adam | epoch: 008 | loss: 0.49089 -- iter: 1056/1309
## Training Step: 641  | total loss: 0.51976
## 
| Adam | epoch: 008 | loss: 0.51976 -- iter: 1072/1309
## Training Step: 642  | total loss: 0.50732
## 
| Adam | epoch: 008 | loss: 0.50732 -- iter: 1088/1309
## Training Step: 643  | total loss: 0.50626
## 
| Adam | epoch: 008 | loss: 0.50626 -- iter: 1104/1309
## Training Step: 644  | total loss: 0.51667
## 
| Adam | epoch: 008 | loss: 0.51667 -- iter: 1120/1309
## Training Step: 645  | total loss: 0.52002
## 
| Adam | epoch: 008 | loss: 0.52002 -- iter: 1136/1309
## Training Step: 646  | total loss: 0.52070
## 
| Adam | epoch: 008 | loss: 0.52070 -- iter: 1152/1309
## Training Step: 647  | total loss: 0.51217
## 
| Adam | epoch: 008 | loss: 0.51217 -- iter: 1168/1309
## Training Step: 648  | total loss: 0.50714
## 
| Adam | epoch: 008 | loss: 0.50714 -- iter: 1184/1309
## Training Step: 649  | total loss: 0.49981
## 
| Adam | epoch: 008 | loss: 0.49981 -- iter: 1200/1309
## Training Step: 650  | total loss: 0.49141
## 
| Adam | epoch: 008 | loss: 0.49141 -- iter: 1216/1309
## Training Step: 651  | total loss: 0.48356
## 
| Adam | epoch: 008 | loss: 0.48356 -- iter: 1232/1309
## Training Step: 652  | total loss: 0.48347
## 
| Adam | epoch: 008 | loss: 0.48347 -- iter: 1248/1309
## Training Step: 653  | total loss: 0.49187
## 
| Adam | epoch: 008 | loss: 0.49187 -- iter: 1264/1309
## Training Step: 654  | total loss: 0.47813
## 
| Adam | epoch: 008 | loss: 0.47813 -- iter: 1280/1309
## Training Step: 655  | total loss: 0.49518
## 
| Adam | epoch: 008 | loss: 0.49518 -- iter: 1296/1309
## Training Step: 656  | total loss: 0.49155
## 
| Adam | epoch: 008 | loss: 0.49155 -- iter: 1309/1309
## Training Step: 656  | total loss: 0.49155
## 
| Adam | epoch: 008 | loss: 0.49155 -- iter: 1309/1309
## --
## Training Step: 657  | total loss: 0.47796
## 
| Adam | epoch: 009 | loss: 0.47796 -- iter: 0016/1309
## Training Step: 658  | total loss: 0.46702
## 
| Adam | epoch: 009 | loss: 0.46702 -- iter: 0032/1309
## Training Step: 659  | total loss: 0.45858
## 
| Adam | epoch: 009 | loss: 0.45858 -- iter: 0048/1309
## Training Step: 660  | total loss: 0.47353
## 
| Adam | epoch: 009 | loss: 0.47353 -- iter: 0064/1309
## Training Step: 661  | total loss: 0.47999
## 
| Adam | epoch: 009 | loss: 0.47999 -- iter: 0080/1309
## Training Step: 662  | total loss: 0.45888
## 
| Adam | epoch: 009 | loss: 0.45888 -- iter: 0096/1309
## Training Step: 663  | total loss: 0.44461
## 
| Adam | epoch: 009 | loss: 0.44461 -- iter: 0112/1309
## Training Step: 664  | total loss: 0.44976
## 
| Adam | epoch: 009 | loss: 0.44976 -- iter: 0128/1309
## Training Step: 665  | total loss: 0.45381
## 
| Adam | epoch: 009 | loss: 0.45381 -- iter: 0144/1309
## Training Step: 666  | total loss: 0.46190
## 
| Adam | epoch: 009 | loss: 0.46190 -- iter: 0160/1309
## Training Step: 667  | total loss: 0.46204
## 
| Adam | epoch: 009 | loss: 0.46204 -- iter: 0176/1309
## Training Step: 668  | total loss: 0.45657
## 
| Adam | epoch: 009 | loss: 0.45657 -- iter: 0192/1309
## Training Step: 669  | total loss: 0.45243
## 
| Adam | epoch: 009 | loss: 0.45243 -- iter: 0208/1309
## Training Step: 670  | total loss: 0.44908
## 
| Adam | epoch: 009 | loss: 0.44908 -- iter: 0224/1309
## Training Step: 671  | total loss: 0.47022
## 
| Adam | epoch: 009 | loss: 0.47022 -- iter: 0240/1309
## Training Step: 672  | total loss: 0.52158
## 
| Adam | epoch: 009 | loss: 0.52158 -- iter: 0256/1309
## Training Step: 673  | total loss: 0.52209
## 
| Adam | epoch: 009 | loss: 0.52209 -- iter: 0272/1309
## Training Step: 674  | total loss: 0.52726
## 
| Adam | epoch: 009 | loss: 0.52726 -- iter: 0288/1309
## Training Step: 675  | total loss: 0.59201
## 
| Adam | epoch: 009 | loss: 0.59201 -- iter: 0304/1309
## Training Step: 676  | total loss: 0.56785
## 
| Adam | epoch: 009 | loss: 0.56785 -- iter: 0320/1309
## Training Step: 677  | total loss: 0.55726
## 
| Adam | epoch: 009 | loss: 0.55726 -- iter: 0336/1309
## Training Step: 678  | total loss: 0.52719
## 
| Adam | epoch: 009 | loss: 0.52719 -- iter: 0352/1309
## Training Step: 679  | total loss: 0.57051
## 
| Adam | epoch: 009 | loss: 0.57051 -- iter: 0368/1309
## Training Step: 680  | total loss: 0.55395
## 
| Adam | epoch: 009 | loss: 0.55395 -- iter: 0384/1309
## Training Step: 681  | total loss: 0.53459
## 
| Adam | epoch: 009 | loss: 0.53459 -- iter: 0400/1309
## Training Step: 682  | total loss: 0.54440
## 
| Adam | epoch: 009 | loss: 0.54440 -- iter: 0416/1309
## Training Step: 683  | total loss: 0.55039
## 
| Adam | epoch: 009 | loss: 0.55039 -- iter: 0432/1309
## Training Step: 684  | total loss: 0.54627
## 
| Adam | epoch: 009 | loss: 0.54627 -- iter: 0448/1309
## Training Step: 685  | total loss: 0.52749
## 
| Adam | epoch: 009 | loss: 0.52749 -- iter: 0464/1309
## Training Step: 686  | total loss: 0.53017
## 
| Adam | epoch: 009 | loss: 0.53017 -- iter: 0480/1309
## Training Step: 687  | total loss: 0.51743
## 
| Adam | epoch: 009 | loss: 0.51743 -- iter: 0496/1309
## Training Step: 688  | total loss: 0.53300
## 
| Adam | epoch: 009 | loss: 0.53300 -- iter: 0512/1309
## Training Step: 689  | total loss: 0.53367
## 
| Adam | epoch: 009 | loss: 0.53367 -- iter: 0528/1309
## Training Step: 690  | total loss: 0.51895
## 
| Adam | epoch: 009 | loss: 0.51895 -- iter: 0544/1309
## Training Step: 691  | total loss: 0.51068
## 
| Adam | epoch: 009 | loss: 0.51068 -- iter: 0560/1309
## Training Step: 692  | total loss: 0.49851
## 
| Adam | epoch: 009 | loss: 0.49851 -- iter: 0576/1309
## Training Step: 693  | total loss: 0.48340
## 
| Adam | epoch: 009 | loss: 0.48340 -- iter: 0592/1309
## Training Step: 694  | total loss: 0.50324
## 
| Adam | epoch: 009 | loss: 0.50324 -- iter: 0608/1309
## Training Step: 695  | total loss: 0.51604
## 
| Adam | epoch: 009 | loss: 0.51604 -- iter: 0624/1309
## Training Step: 696  | total loss: 0.50107
## 
| Adam | epoch: 009 | loss: 0.50107 -- iter: 0640/1309
## Training Step: 697  | total loss: 0.48031
## 
| Adam | epoch: 009 | loss: 0.48031 -- iter: 0656/1309
## Training Step: 698  | total loss: 0.50270
## 
| Adam | epoch: 009 | loss: 0.50270 -- iter: 0672/1309
## Training Step: 699  | total loss: 0.52066
## 
| Adam | epoch: 009 | loss: 0.52066 -- iter: 0688/1309
## Training Step: 700  | total loss: 0.51064
## 
| Adam | epoch: 009 | loss: 0.51064 -- iter: 0704/1309
## Training Step: 701  | total loss: 0.51972
## 
| Adam | epoch: 009 | loss: 0.51972 -- iter: 0720/1309
## Training Step: 702  | total loss: 0.51759
## 
| Adam | epoch: 009 | loss: 0.51759 -- iter: 0736/1309
## Training Step: 703  | total loss: 0.51469
## 
| Adam | epoch: 009 | loss: 0.51469 -- iter: 0752/1309
## Training Step: 704  | total loss: 0.51418
## 
| Adam | epoch: 009 | loss: 0.51418 -- iter: 0768/1309
## Training Step: 705  | total loss: 0.54604
## 
| Adam | epoch: 009 | loss: 0.54604 -- iter: 0784/1309
## Training Step: 706  | total loss: 0.53391
## 
| Adam | epoch: 009 | loss: 0.53391 -- iter: 0800/1309
## Training Step: 707  | total loss: 0.51728
## 
| Adam | epoch: 009 | loss: 0.51728 -- iter: 0816/1309
## Training Step: 708  | total loss: 0.54406
## 
| Adam | epoch: 009 | loss: 0.54406 -- iter: 0832/1309
## Training Step: 709  | total loss: 0.57167
## 
| Adam | epoch: 009 | loss: 0.57167 -- iter: 0848/1309
## Training Step: 710  | total loss: 0.55326
## 
| Adam | epoch: 009 | loss: 0.55326 -- iter: 0864/1309
## Training Step: 711  | total loss: 0.56086
## 
| Adam | epoch: 009 | loss: 0.56086 -- iter: 0880/1309
## Training Step: 712  | total loss: 0.53749
## 
| Adam | epoch: 009 | loss: 0.53749 -- iter: 0896/1309
## Training Step: 713  | total loss: 0.53581
## 
| Adam | epoch: 009 | loss: 0.53581 -- iter: 0912/1309
## Training Step: 714  | total loss: 0.54089
## 
| Adam | epoch: 009 | loss: 0.54089 -- iter: 0928/1309
## Training Step: 715  | total loss: 0.53891
## 
| Adam | epoch: 009 | loss: 0.53891 -- iter: 0944/1309
## Training Step: 716  | total loss: 0.53494
## 
| Adam | epoch: 009 | loss: 0.53494 -- iter: 0960/1309
## Training Step: 717  | total loss: 0.52106
## 
| Adam | epoch: 009 | loss: 0.52106 -- iter: 0976/1309
## Training Step: 718  | total loss: 0.53275
## 
| Adam | epoch: 009 | loss: 0.53275 -- iter: 0992/1309
## Training Step: 719  | total loss: 0.51099
## 
| Adam | epoch: 009 | loss: 0.51099 -- iter: 1008/1309
## Training Step: 720  | total loss: 0.49704
## 
| Adam | epoch: 009 | loss: 0.49704 -- iter: 1024/1309
## Training Step: 721  | total loss: 0.49086
## 
| Adam | epoch: 009 | loss: 0.49086 -- iter: 1040/1309
## Training Step: 722  | total loss: 0.48474
## 
| Adam | epoch: 009 | loss: 0.48474 -- iter: 1056/1309
## Training Step: 723  | total loss: 0.49056
## 
| Adam | epoch: 009 | loss: 0.49056 -- iter: 1072/1309
## Training Step: 724  | total loss: 0.48636
## 
| Adam | epoch: 009 | loss: 0.48636 -- iter: 1088/1309
## Training Step: 725  | total loss: 0.48294
## 
| Adam | epoch: 009 | loss: 0.48294 -- iter: 1104/1309
## Training Step: 726  | total loss: 0.46623
## 
| Adam | epoch: 009 | loss: 0.46623 -- iter: 1120/1309
## Training Step: 727  | total loss: 0.46982
## 
| Adam | epoch: 009 | loss: 0.46982 -- iter: 1136/1309
## Training Step: 728  | total loss: 0.46565
## 
| Adam | epoch: 009 | loss: 0.46565 -- iter: 1152/1309
## Training Step: 729  | total loss: 0.45926
## 
| Adam | epoch: 009 | loss: 0.45926 -- iter: 1168/1309
## Training Step: 730  | total loss: 0.44633
## 
| Adam | epoch: 009 | loss: 0.44633 -- iter: 1184/1309
## Training Step: 731  | total loss: 0.48148
## 
| Adam | epoch: 009 | loss: 0.48148 -- iter: 1200/1309
## Training Step: 732  | total loss: 0.47995
## 
| Adam | epoch: 009 | loss: 0.47995 -- iter: 1216/1309
## Training Step: 733  | total loss: 0.47528
## 
| Adam | epoch: 009 | loss: 0.47528 -- iter: 1232/1309
## Training Step: 734  | total loss: 0.46543
## 
| Adam | epoch: 009 | loss: 0.46543 -- iter: 1248/1309
## Training Step: 735  | total loss: 0.46429
## 
| Adam | epoch: 009 | loss: 0.46429 -- iter: 1264/1309
## Training Step: 736  | total loss: 0.47436
## 
| Adam | epoch: 009 | loss: 0.47436 -- iter: 1280/1309
## Training Step: 737  | total loss: 0.45810
## 
| Adam | epoch: 009 | loss: 0.45810 -- iter: 1296/1309
## Training Step: 738  | total loss: 0.46169
## 
| Adam | epoch: 009 | loss: 0.46169 -- iter: 1309/1309
## Training Step: 738  | total loss: 0.46169
## 
| Adam | epoch: 009 | loss: 0.46169 -- iter: 1309/1309
## --
## Training Step: 739  | total loss: 0.44133
## 
| Adam | epoch: 010 | loss: 0.44133 -- iter: 0016/1309
## Training Step: 740  | total loss: 0.50094
## 
| Adam | epoch: 010 | loss: 0.50094 -- iter: 0032/1309
## Training Step: 741  | total loss: 0.49312
## 
| Adam | epoch: 010 | loss: 0.49312 -- iter: 0048/1309
## Training Step: 742  | total loss: 0.56470
## 
| Adam | epoch: 010 | loss: 0.56470 -- iter: 0064/1309
## Training Step: 743  | total loss: 0.55681
## 
| Adam | epoch: 010 | loss: 0.55681 -- iter: 0080/1309
## Training Step: 744  | total loss: 0.55594
## 
| Adam | epoch: 010 | loss: 0.55594 -- iter: 0096/1309
## Training Step: 745  | total loss: 0.57815
## 
| Adam | epoch: 010 | loss: 0.57815 -- iter: 0112/1309
## Training Step: 746  | total loss: 0.54911
## 
| Adam | epoch: 010 | loss: 0.54911 -- iter: 0128/1309
## Training Step: 747  | total loss: 0.53880
## 
| Adam | epoch: 010 | loss: 0.53880 -- iter: 0144/1309
## Training Step: 748  | total loss: 0.52911
## 
| Adam | epoch: 010 | loss: 0.52911 -- iter: 0160/1309
## Training Step: 749  | total loss: 0.54085
## 
| Adam | epoch: 010 | loss: 0.54085 -- iter: 0176/1309
## Training Step: 750  | total loss: 0.52906
## 
| Adam | epoch: 010 | loss: 0.52906 -- iter: 0192/1309
## Training Step: 751  | total loss: 0.53248
## 
| Adam | epoch: 010 | loss: 0.53248 -- iter: 0208/1309
## Training Step: 752  | total loss: 0.51534
## 
| Adam | epoch: 010 | loss: 0.51534 -- iter: 0224/1309
## Training Step: 753  | total loss: 0.50973
## 
| Adam | epoch: 010 | loss: 0.50973 -- iter: 0240/1309
## Training Step: 754  | total loss: 0.50658
## 
| Adam | epoch: 010 | loss: 0.50658 -- iter: 0256/1309
## Training Step: 755  | total loss: 0.54838
## 
| Adam | epoch: 010 | loss: 0.54838 -- iter: 0272/1309
## Training Step: 756  | total loss: 0.51823
## 
| Adam | epoch: 010 | loss: 0.51823 -- iter: 0288/1309
## Training Step: 757  | total loss: 0.52120
## 
| Adam | epoch: 010 | loss: 0.52120 -- iter: 0304/1309
## Training Step: 758  | total loss: 0.56750
## 
| Adam | epoch: 010 | loss: 0.56750 -- iter: 0320/1309
## Training Step: 759  | total loss: 0.54743
## 
| Adam | epoch: 010 | loss: 0.54743 -- iter: 0336/1309
## Training Step: 760  | total loss: 0.55962
## 
| Adam | epoch: 010 | loss: 0.55962 -- iter: 0352/1309
## Training Step: 761  | total loss: 0.53260
## 
| Adam | epoch: 010 | loss: 0.53260 -- iter: 0368/1309
## Training Step: 762  | total loss: 0.55761
## 
| Adam | epoch: 010 | loss: 0.55761 -- iter: 0384/1309
## Training Step: 763  | total loss: 0.55355
## 
| Adam | epoch: 010 | loss: 0.55355 -- iter: 0400/1309
## Training Step: 764  | total loss: 0.56473
## 
| Adam | epoch: 010 | loss: 0.56473 -- iter: 0416/1309
## Training Step: 765  | total loss: 0.54852
## 
| Adam | epoch: 010 | loss: 0.54852 -- iter: 0432/1309
## Training Step: 766  | total loss: 0.51815
## 
| Adam | epoch: 010 | loss: 0.51815 -- iter: 0448/1309
## Training Step: 767  | total loss: 0.53459
## 
| Adam | epoch: 010 | loss: 0.53459 -- iter: 0464/1309
## Training Step: 768  | total loss: 0.55149
## 
| Adam | epoch: 010 | loss: 0.55149 -- iter: 0480/1309
## Training Step: 769  | total loss: 0.54886
## 
| Adam | epoch: 010 | loss: 0.54886 -- iter: 0496/1309
## Training Step: 770  | total loss: 0.57334
## 
| Adam | epoch: 010 | loss: 0.57334 -- iter: 0512/1309
## Training Step: 771  | total loss: 0.56305
## 
| Adam | epoch: 010 | loss: 0.56305 -- iter: 0528/1309
## Training Step: 772  | total loss: 0.56581
## 
| Adam | epoch: 010 | loss: 0.56581 -- iter: 0544/1309
## Training Step: 773  | total loss: 0.54776
## 
| Adam | epoch: 010 | loss: 0.54776 -- iter: 0560/1309
## Training Step: 774  | total loss: 0.57027
## 
| Adam | epoch: 010 | loss: 0.57027 -- iter: 0576/1309
## Training Step: 775  | total loss: 0.56468
## 
| Adam | epoch: 010 | loss: 0.56468 -- iter: 0592/1309
## Training Step: 776  | total loss: 0.56821
## 
| Adam | epoch: 010 | loss: 0.56821 -- iter: 0608/1309
## Training Step: 777  | total loss: 0.57312
## 
| Adam | epoch: 010 | loss: 0.57312 -- iter: 0624/1309
## Training Step: 778  | total loss: 0.57299
## 
| Adam | epoch: 010 | loss: 0.57299 -- iter: 0640/1309
## Training Step: 779  | total loss: 0.58302
## 
| Adam | epoch: 010 | loss: 0.58302 -- iter: 0656/1309
## Training Step: 780  | total loss: 0.60344
## 
| Adam | epoch: 010 | loss: 0.60344 -- iter: 0672/1309
## Training Step: 781  | total loss: 0.58825
## 
| Adam | epoch: 010 | loss: 0.58825 -- iter: 0688/1309
## Training Step: 782  | total loss: 0.60025
## 
| Adam | epoch: 010 | loss: 0.60025 -- iter: 0704/1309
## Training Step: 783  | total loss: 0.57954
## 
| Adam | epoch: 010 | loss: 0.57954 -- iter: 0720/1309
## Training Step: 784  | total loss: 0.58081
## 
| Adam | epoch: 010 | loss: 0.58081 -- iter: 0736/1309
## Training Step: 785  | total loss: 0.56436
## 
| Adam | epoch: 010 | loss: 0.56436 -- iter: 0752/1309
## Training Step: 786  | total loss: 0.55580
## 
| Adam | epoch: 010 | loss: 0.55580 -- iter: 0768/1309
## Training Step: 787  | total loss: 0.56632
## 
| Adam | epoch: 010 | loss: 0.56632 -- iter: 0784/1309
## Training Step: 788  | total loss: 0.57147
## 
| Adam | epoch: 010 | loss: 0.57147 -- iter: 0800/1309
## Training Step: 789  | total loss: 0.54566
## 
| Adam | epoch: 010 | loss: 0.54566 -- iter: 0816/1309
## Training Step: 790  | total loss: 0.54321
## 
| Adam | epoch: 010 | loss: 0.54321 -- iter: 0832/1309
## Training Step: 791  | total loss: 0.54763
## 
| Adam | epoch: 010 | loss: 0.54763 -- iter: 0848/1309
## Training Step: 792  | total loss: 0.54398
## 
| Adam | epoch: 010 | loss: 0.54398 -- iter: 0864/1309
## Training Step: 793  | total loss: 0.52236
## 
| Adam | epoch: 010 | loss: 0.52236 -- iter: 0880/1309
## Training Step: 794  | total loss: 0.53792
## 
| Adam | epoch: 010 | loss: 0.53792 -- iter: 0896/1309
## Training Step: 795  | total loss: 0.53808
## 
| Adam | epoch: 010 | loss: 0.53808 -- iter: 0912/1309
## Training Step: 796  | total loss: 0.53747
## 
| Adam | epoch: 010 | loss: 0.53747 -- iter: 0928/1309
## Training Step: 797  | total loss: 0.52337
## 
| Adam | epoch: 010 | loss: 0.52337 -- iter: 0944/1309
## Training Step: 798  | total loss: 0.50844
## 
| Adam | epoch: 010 | loss: 0.50844 -- iter: 0960/1309
## Training Step: 799  | total loss: 0.52519
## 
| Adam | epoch: 010 | loss: 0.52519 -- iter: 0976/1309
## Training Step: 800  | total loss: 0.53466
## 
| Adam | epoch: 010 | loss: 0.53466 -- iter: 0992/1309
## Training Step: 801  | total loss: 0.54050
## 
| Adam | epoch: 010 | loss: 0.54050 -- iter: 1008/1309
## Training Step: 802  | total loss: 0.53381
## 
| Adam | epoch: 010 | loss: 0.53381 -- iter: 1024/1309
## Training Step: 803  | total loss: 0.51315
## 
| Adam | epoch: 010 | loss: 0.51315 -- iter: 1040/1309
## Training Step: 804  | total loss: 0.51555
## 
| Adam | epoch: 010 | loss: 0.51555 -- iter: 1056/1309
## Training Step: 805  | total loss: 0.49844
## 
| Adam | epoch: 010 | loss: 0.49844 -- iter: 1072/1309
## Training Step: 806  | total loss: 0.48862
## 
| Adam | epoch: 010 | loss: 0.48862 -- iter: 1088/1309
## Training Step: 807  | total loss: 0.49002
## 
| Adam | epoch: 010 | loss: 0.49002 -- iter: 1104/1309
## Training Step: 808  | total loss: 0.48676
## 
| Adam | epoch: 010 | loss: 0.48676 -- iter: 1120/1309
## Training Step: 809  | total loss: 0.48852
## 
| Adam | epoch: 010 | loss: 0.48852 -- iter: 1136/1309
## Training Step: 810  | total loss: 0.46520
## 
| Adam | epoch: 010 | loss: 0.46520 -- iter: 1152/1309
## Training Step: 811  | total loss: 0.49851
## 
| Adam | epoch: 010 | loss: 0.49851 -- iter: 1168/1309
## Training Step: 812  | total loss: 0.50055
## 
| Adam | epoch: 010 | loss: 0.50055 -- iter: 1184/1309
## Training Step: 813  | total loss: 0.50702
## 
| Adam | epoch: 010 | loss: 0.50702 -- iter: 1200/1309
## Training Step: 814  | total loss: 0.49447
## 
| Adam | epoch: 010 | loss: 0.49447 -- iter: 1216/1309
## Training Step: 815  | total loss: 0.50918
## 
| Adam | epoch: 010 | loss: 0.50918 -- iter: 1232/1309
## Training Step: 816  | total loss: 0.51872
## 
| Adam | epoch: 010 | loss: 0.51872 -- iter: 1248/1309
## Training Step: 817  | total loss: 0.49703
## 
| Adam | epoch: 010 | loss: 0.49703 -- iter: 1264/1309
## Training Step: 818  | total loss: 0.47474
## 
| Adam | epoch: 010 | loss: 0.47474 -- iter: 1280/1309
## Training Step: 819  | total loss: 0.47872
## 
| Adam | epoch: 010 | loss: 0.47872 -- iter: 1296/1309
## Training Step: 820  | total loss: 0.46454
## 
| Adam | epoch: 010 | loss: 0.46454 -- iter: 1309/1309
## Training Step: 820  | total loss: 0.46454
## 
| Adam | epoch: 010 | loss: 0.46454 -- iter: 1309/1309
## --
## DiCaprio Surviving Rate: 0.0970936119556
## Winslet Surviving Rate: 0.938064098358

The model rates Winslet's survival chance (~0.94) far above DiCaprio's (~0.10); these are the class-1 probabilities returned by model.predict() for the two passenger records in the tflearn quickstart.
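
If you want the predictions back in R, the feather workflow shown earlier is one option. A minimal sketch, assuming the Python chunk first writes its predictions to a (hypothetical) preds.feather, e.g. via feather.write_dataframe(pd.DataFrame(pred), 'preds.feather'):

library(feather)
## read the predictions saved by the (hypothetical) Python write step
preds <- read_feather("preds.feather")
preds
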
Build details

The code below was run in a local R session to start up this RStudio instance with the right libraries installed.

Use the latest version of googleComputeEngineR from GitHub if you want to use get_dockerfolder("cloudDataLabR").
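
For example, a one-liner to get the development version (assuming devtools is installed; cloudyr is the GitHub organisation hosting the package):

## install the development version of googleComputeEngineR from GitHub
devtools::install_github("cloudyr/googleComputeEngineR")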

library(googleComputeEngineR)

## make an RStudio instance to base upon
vm <- gce_vm(template = "rstudio", 
             name = "r-datalab-build", 
             username = "mark", password = "mark1234", 
             predefined_type = "n1-standard-1")

## once RStudio has loaded at the instance IP, build the Dockerfile below on the instance
## this takes a while
docker_build(vm, dockerfolder = get_dockerfolder("cloudDataLabR"), new_image = "r-datalab")


## send to the Container Registry
gce_push_registry(vm, save_name = "datalab-r-image", image_name = "r-datalab")

## Can now launch instances using this image via:
vm2 <- gce_vm(template = "rstudio", 
              name = "r-datalab", 
              predefined_type = "n1-standard-1", 
              dynamic_image = gce_tag_container("datalab-r-image"),
              username = "mark", password = "mark1234")
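
Once the image is pushed to the Container Registry the build VM is no longer needed. A clean-up step along these lines (not in the original build script) avoids paying for an idle instance:

## stop and delete the build instance; the saved image remains in the registry
gce_vm_stop("r-datalab-build")
gce_vm_delete("r-datalab-build")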

The Dockerfile used is below:

FROM rocker/hadleyverse
MAINTAINER Mark Edmondson (r@sunholo.com)

# install cron, nano and the Python data/ML stack (numpy, pandas, TensorFlow, tflearn, feather)
RUN apt-get update && apt-get install -y \
    cron nano \
    python-pip python-dev libhdf5-dev \
    && pip install cython \
    && pip install numpy \
    && pip install pandas \
    && export TF_BINARY_URL=https://storage.googleapis.com/tensorflow/linux/cpu/tensorflow-0.11.0-cp27-none-linux_x86_64.whl \
    && pip install --upgrade $TF_BINARY_URL \
    && pip install git+https://github.com/tflearn/tflearn.git \
    && pip install feather-format \
    && pip install h5py \
    ## clean up
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/* \
    && rm -rf /tmp/downloaded_packages/ /tmp/*.rds
    
## Install packages from CRAN
RUN install2.r --error \
    -r 'http://cran.rstudio.com' \
    googleAuthR googleAnalyticsR searchConsoleR googleCloudStorageR bigQueryR htmlwidgets feather rPython \
    ## install Github packages
    && Rscript -e "devtools::install_github(c('MarkEdmondson1234/youtubeAnalyticsR', 'MarkEdmondson1234/googleID', 'MarkEdmondson1234/googleAuthR'))" \
    && Rscript -e "devtools::install_github(c('bnosac/cronR'))" \
    && Rscript -e "devtools::install_github(c('rstudio/tensorflow'))" \
    ## clean up
    && rm -rf /tmp/downloaded_packages/ /tmp/*.rds
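
After vm2 is running, RStudio is served on the instance's external IP (the rstudio template publishes it on port 80). A small sketch to look the IP up and open the login page, using gce_get_external_ip() from googleComputeEngineR:

## fetch the external IP of the new instance and open the RStudio login page
ip <- gce_get_external_ip(vm2)
browseURL(paste0("http://", ip))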