import time

import numpy as np
import tensorflow as tf

from bigdl.optim.optimizer import *
from zoo import init_nncontext
from zoo.pipeline.api.net import TFOptimizer, TFDataset
num_classes = 10


def get_deep_cnn_model():
    # From: https://github.com/keras-team/keras/blob/master/examples/cifar10_cnn.py
    images = tf.keras.layers.Input(shape=(32, 32, 3))

    x = tf.keras.layers.Conv2D(32, (3, 3), padding='same', activation='relu')(images)
    x = tf.keras.layers.Conv2D(32, (3, 3), activation='relu')(x)
    x = tf.keras.layers.MaxPooling2D(pool_size=(2, 2))(x)
    x = tf.keras.layers.Dropout(0.25)(x)

    x = tf.keras.layers.Conv2D(64, (3, 3), padding='same', activation='relu')(x)
    x = tf.keras.layers.Conv2D(64, (3, 3), activation='relu')(x)
    x = tf.keras.layers.MaxPooling2D(pool_size=(2, 2))(x)
    x = tf.keras.layers.Dropout(0.25)(x)

    x = tf.keras.layers.Flatten()(x)
    x = tf.keras.layers.Dense(512, activation='relu')(x)
    x = tf.keras.layers.Dropout(0.5)(x)
    preds = tf.keras.layers.Dense(num_classes, activation='softmax')(x)

    model = tf.keras.models.Model(inputs=images, outputs=preds)
    model.compile(optimizer=tf.keras.optimizers.RMSprop(lr=0.0001, decay=1e-6),
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    return model
if __name__ == '__main__':
    sc = init_nncontext()

    from keras.datasets import cifar10 as kcifar10
    (x_train, y_train), (x_val, y_val) = kcifar10.load_data()

    # scale pixels to [0, 1] floats and one-hot encode the labels,
    # as in the referenced keras cifar10_cnn.py example
    x_train = x_train.astype('float32') / 255
    x_val = x_val.astype('float32') / 255
    y_train = tf.keras.utils.to_categorical(y_train, num_classes)
    y_val = tf.keras.utils.to_categorical(y_val, num_classes)

    # distribute the numpy arrays as [features, label] record RDDs
    x_train_rdd = sc.parallelize(x_train)
    y_train_rdd = sc.parallelize(y_train)
    training_rdd = x_train_rdd.zip(y_train_rdd) \
        .map(lambda rec_tuple: [rec_tuple[0], np.array(rec_tuple[1])])

    x_val_rdd = sc.parallelize(x_val)
    y_val_rdd = sc.parallelize(y_val)
    val_rdd = x_val_rdd.zip(y_val_rdd) \
        .map(lambda rec_tuple: [rec_tuple[0], np.array(rec_tuple[1])])
    # batch_size here is the global batch size across all Spark partitions;
    # the labels are one-hot float vectors, so declare them as [num_classes] float32
    dataset = TFDataset.from_rdd(training_rdd,
                                 names=["features", "labels"],
                                 shapes=[[32, 32, 3], [num_classes]],
                                 types=[tf.float32, tf.float32],
                                 batch_size=256,
                                 val_rdd=val_rdd)
    model = get_deep_cnn_model()

    # create an optimizer from the compiled Keras model and the distributed dataset
    optimizer = TFOptimizer.from_keras(model, dataset)

    # keep handles to the summaries so the logged scalars can be read back later
    train_summary = TrainSummary("/tmp/az_lenet", "lenet")
    val_summary = ValidationSummary("/tmp/az_lenet", "lenet")
    optimizer.set_train_summary(train_summary)
    optimizer.set_val_summary(val_summary)

    start = time.time()
    # kick off distributed training; accuracy on the validation data increases over the epochs
    optimizer.optimize(end_trigger=MaxEpoch(50))
    print("training took %.1f seconds" % (time.time() - start))
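
    # Optional sketch: read the logged loss curve back from the train summary above.
    # This assumes BigDL's read_scalar API, which returns (step, value, timestamp)
    # records for a logged tag; "Loss" is the default training tag, but available
    # tags may differ across BigDL / Analytics Zoo versions, so verify before relying on it.
    loss = np.array(train_summary.read_scalar("Loss"))
    if loss.size > 0:
        print("last recorded training loss: %s at step %s" % (loss[-1][1], loss[-1][0]))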