Keras

Automatically logging Keras experiments

Comet.ml will add a callback to your Keras code without requiring you to do anything else. Just add these two lines of code at the top of your Keras script, before you import Keras.

from comet_ml import Experiment
experiment = Experiment()

# Your code.

For more information on getting started, including how to configure your API key, see the documentation on the Comet config file.
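
For reference, the Comet config file is an INI-style file, commonly saved as .comet.config in your home or working directory. A minimal sketch; the api_key value below is a placeholder for your own key:

[comet]
api_key = YOUR-API-KEY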

The Comet.ml Keras callback

Comet.ml logs your experiment through a callback executed when you run model.fit() in Keras. You do not need to add this callback yourself; it is added for you automatically. However, if you ever need to access the callback manually, simply call Experiment.get_keras_callback().
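
If you do need the callback yourself, for example to pass it to model.fit() explicitly alongside other callbacks, you can retrieve it as shown in this minimal sketch, which assumes model, x_train, and y_train are already defined:

from comet_ml import Experiment

experiment = Experiment(project_name='mnist')

# Retrieve the Comet callback and pass it to model.fit() manually
comet_callback = experiment.get_keras_callback()
model.fit(x_train, y_train, callbacks=[comet_callback])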

How to report manually

You can log additional parameters beyond those Comet.ml collects automatically by using Experiment.log_parameter().

from comet_ml import Experiment

# Create an experiment
experiment = Experiment(project_name='mnist')

import keras

batch_size = 128

experiment.log_parameter("batch_size", batch_size)

You can log an entire dictionary of custom parameters to your experiment by using Experiment.log_parameters().

from comet_ml import Experiment
from keras.models import Sequential
from keras.layers import Dense, Dropout

experiment = Experiment(project_name="my project name",
                        auto_param_logging=False)
batch_size = 128
num_classes = 10
epochs = 20

params = {
    "batch_size": batch_size,
    "epochs": epochs,
    "num_classes": num_classes,
}

experiment.log_parameters(params)

Context Manager (Train/Test/Validate)

You can also log metrics to separate training, validation, and test contexts with the context managers Experiment.train(), Experiment.validate(), and Experiment.test().

from comet_ml import Experiment
from keras.models import Sequential
from keras.layers import Dense, Dropout
from keras.callbacks import EarlyStopping

experiment = Experiment(project_name="my project name",
                        auto_param_logging=True)
batch_size = 128
num_classes = 10
epochs = 20

params = {
    "batch_size": batch_size,
    "epochs": epochs,
    "num_classes": num_classes,
}

experiment.log_parameters(params)

# Define and compile the model here

with experiment.train():
    history = model.fit(x_train, y_train,
                        batch_size=batch_size,
                        epochs=epochs,
                        verbose=1,
                        callbacks=[EarlyStopping(monitor='loss', min_delta=1e-4, patience=3, verbose=1, mode='auto')])

with experiment.test():
    loss, accuracy = model.evaluate(x_test, y_test)
    print(loss, accuracy)
    metrics = {
        'loss': loss,
        'accuracy': accuracy,
    }
    experiment.log_metrics(metrics)
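
Experiment.validate() works the same way; metrics logged inside it are prefixed with 'validate_'. A minimal sketch, assuming a held-out validation split x_val and y_val:

with experiment.validate():
    loss, accuracy = model.evaluate(x_val, y_val)
    experiment.log_metrics({'loss': loss, 'accuracy': accuracy})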

End-to-end example

Here is a simple end-to-end Keras example that trains a dense neural network on the MNIST dataset.

For more examples using Keras, see our Comet Examples GitHub repository.

from comet_ml import Experiment

# Create an experiment with your API key
experiment = Experiment(project_name='mnist',
                        auto_param_logging=False)

import keras
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout
from keras.callbacks import EarlyStopping

batch_size = 128
num_classes = 10
epochs = 20
num_nodes = 64
optimizer = 'adam'
activation = 'relu'

# The data, shuffled and split between train and test sets
(x_train, y_train), (x_test, y_test) = mnist.load_data()

x_train = x_train.reshape(60000, 784)
x_test = x_test.reshape(10000, 784)
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')

# Convert class vectors to binary class matrices
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)

# These hyperparameters will all be logged
params = {
    'batch_size': batch_size,
    'epochs': epochs,
    'layer1_type': 'Dense',
    'layer1_num_nodes': num_nodes,
    'layer1_activation': activation,
    'optimizer': optimizer,
}
model = Sequential()
model.add(Dense(num_nodes, activation=activation, input_shape=(784,)))
model.add(Dense(num_classes, activation='softmax'))

# Print model.summary() so it is captured automatically in the `Output` tab
print(model.summary())

model.compile(loss='categorical_crossentropy',
              optimizer=optimizer,
              metrics=['accuracy'])

# Will log metrics with the prefix 'train_'
with experiment.train():
    history = model.fit(x_train, y_train,
                        batch_size=batch_size,
                        epochs=epochs,
                        verbose=1,
                        validation_data=(x_test, y_test),
                        callbacks=[EarlyStopping(monitor='val_loss', min_delta=1e-4, patience=3, verbose=1, mode='auto')])

# Will log metrics with the prefix 'test_'
with experiment.test():
    loss, accuracy = model.evaluate(x_test, y_test)
    metrics = {
        'loss': loss,
        'accuracy': accuracy,
    }
    experiment.log_metrics(metrics)

experiment.log_parameters(params)
experiment.log_dataset_hash(x_train)  # Creates and logs a hash of your training data