UAI SDK now supports Keras training
Supports Keras 2.0.8.
Adds a Keras MNIST training example (code and data).

Signed-off-by: Xiang Song <[email protected]>
Xiang Song (宋翔) committed Sep 25, 2017
1 parent 41df36d commit 69aa210
Showing 8 changed files with 180 additions and 9 deletions.
82 changes: 82 additions & 0 deletions examples/keras/train/mnist/code/mnist_cnn.py
@@ -0,0 +1,82 @@
'''Trains a simple convnet on the MNIST dataset.
Gets to 99.25% test accuracy after 12 epochs
(there is still a lot of margin for parameter tuning).
16 seconds per epoch on a GRID K520 GPU.
'''

from __future__ import print_function
import keras
from mnist_datasets import load_data
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten
from keras.layers import Conv2D, MaxPooling2D
from keras import backend as K

import tensorflow as tf
from uaitrain.arch.tensorflow import uflag

FLAGS = tf.app.flags.FLAGS
flags = tf.app.flags

flags.DEFINE_integer("epochs", 12, "Number of epochs")

batch_size = 128
num_classes = 10
epochs = FLAGS.epochs

# input image dimensions
img_rows, img_cols = 28, 28

# the data, shuffled and split between train and test sets
(x_train, y_train), (x_test, y_test) = load_data(FLAGS.data_dir)

if K.image_data_format() == 'channels_first':
    x_train = x_train.reshape(x_train.shape[0], 1, img_rows, img_cols)
    x_test = x_test.reshape(x_test.shape[0], 1, img_rows, img_cols)
    input_shape = (1, img_rows, img_cols)
else:
    x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)
    x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)
    input_shape = (img_rows, img_cols, 1)

x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
print('x_train shape:', x_train.shape)
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')

# convert class vectors to binary class matrices
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)

model = Sequential()
model.add(Conv2D(32, kernel_size=(3, 3),
                 activation='relu',
                 input_shape=input_shape))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(num_classes, activation='softmax'))

model.compile(loss=keras.losses.categorical_crossentropy,
              optimizer=keras.optimizers.Adadelta(),
              metrics=['accuracy'])
tbCallBack = keras.callbacks.TensorBoard(log_dir=FLAGS.log_dir, histogram_freq=0, write_graph=True, write_images=True)

model.fit(x_train, y_train,
          batch_size=batch_size,
          epochs=epochs,
          verbose=1,
          validation_data=(x_test, y_test),
          callbacks=[tbCallBack])
score = model.evaluate(x_test, y_test, verbose=0)
print('Test loss:', score[0])
print('Test accuracy:', score[1])

model_path = FLAGS.output_dir + '/mnist_model.h5'
model.save(model_path)
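For reference, the trained model written to --output_dir as mnist_model.h5 can be loaded back with the standard Keras API. A minimal sketch, assuming the /data/output and /data/data container mounts used by the pack-op docker commands in this commit and the default channels-last image layout (both are assumptions for a local run):

# Sketch: reload the model saved by mnist_cnn.py and score one test image.
# The paths mirror the container mounts used by the pack ops in this commit;
# adjust them when running outside the UAI container.
import numpy as np
from keras.models import load_model

model = load_model('/data/output/mnist_model.h5')
with np.load('/data/data/mnist.npz') as f:
    x_test = f['x_test'].reshape(-1, 28, 28, 1).astype('float32') / 255
print('predicted digit:', model.predict(x_test[:1]).argmax(axis=1)[0])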
17 changes: 17 additions & 0 deletions examples/keras/train/mnist/code/mnist_datasets.py
@@ -0,0 +1,17 @@
import numpy as np


def load_data(dataDir):
    """Loads the MNIST dataset from a local mnist.npz file.
    # Arguments
        dataDir: directory that contains mnist.npz
            (e.g. the UAI Train data dir mounted at /data/data).
    # Returns
        Tuple of Numpy arrays: `(x_train, y_train), (x_test, y_test)`.
    """
    path = dataDir + '/mnist.npz'
    f = np.load(path)
    x_train, y_train = f['x_train'], f['y_train']
    x_test, y_test = f['x_test'], f['y_test']
    f.close()
    return (x_train, y_train), (x_test, y_test)
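The loader above expects the mnist.npz bundled with this example (added below) to contain the four arrays it reads. A quick sanity-check sketch; the key names come from load_data, while the shapes are the standard MNIST ones and are stated here as an assumption:

# Sanity check for the bundled mnist.npz; key names match load_data(),
# expected shapes are the usual MNIST ones (assumed, not stated in the commit).
import numpy as np

with np.load('examples/keras/train/mnist/data/mnist.npz') as f:
    assert f['x_train'].shape == (60000, 28, 28)
    assert f['y_train'].shape == (60000,)
    assert f['x_test'].shape == (10000, 28, 28)
    assert f['y_test'].shape == (10000,)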
Binary file added examples/keras/train/mnist/data/mnist.npz
9 changes: 3 additions & 6 deletions uaitrain/operation/pack_docker_image/base_pack_op.py
@@ -107,6 +107,7 @@ def _add_code_args(self, pack_parser):
        code_parse.add_argument(
            '--train_params',
            type=str,
+           default="",
            help='The params used in training')

        cmd_gen_parse = pack_parser.add_argument_group(
@@ -275,15 +276,15 @@ def _gen_cpu_docker_cmd(self, pycmd):
            "-v " + self.test_data_path + ":" + "/data/data " + \
            "-v " + self.test_output_path + ":" + "/data/output " + \
            self.user_cpu_image + " " + "/bin/bash -c " + \
-           "\"cd /data && /usr/bin/python " + pycmd + " " + "--work_dir=/data --data_dir=/data/data --output_dir=/data/output --log_dir=/data/output/log\""
+           "\"cd /data && /usr/bin/python " + pycmd + " " + "--work_dir=/data --data_dir=/data/data --output_dir=/data/output --log_dir=/data/output\""
        return cpu_docker_cmd

    def _gen_gpu_docker_cmd(self, pycmd):
        gpu_docker_cmd = "sudo nvidia-docker run -it " + \
            "-v " + self.test_data_path + ":" + "/data/data " + \
            "-v " + self.test_output_path + ":" + "/data/output " + \
            self.user_gpu_image + " " + "/bin/bash -c " + \
-           "\"cd /data && /usr/bin/python " + pycmd + " " + "--work_dir=/data --data_dir=/data/data --output_dir=/data/output --log_dir=/data/output/log\""
+           "\"cd /data && /usr/bin/python " + pycmd + " " + "--work_dir=/data --data_dir=/data/data --output_dir=/data/output --log_dir=/data/output\""
        return gpu_docker_cmd

    def _gen_run_cmd(self):
@@ -370,7 +371,3 @@ def cmd_run(self, args):
        self.cpu_image = cpu_image_name

        self._build_userimage()




4 changes: 2 additions & 2 deletions uaitrain/operation/pack_docker_image/caffe_pack_op.py
@@ -55,15 +55,15 @@ def _gen_cpu_docker_cmd(self, pycmd):
            "-v " + self.test_data_path + ":" + "/data/data " + \
            "-v " + self.test_output_path + ":" + "/data/output " + \
            self.user_cpu_image + " " + "/bin/bash -c " + \
-           "\"cd /data && /usr/bin/python " + pycmd + " " + "--use_cpu=True --work_dir=/data --data_dir=/data/data --output_dir=/data/output --log_dir=/data/output/log\" "
+           "\"cd /data && /usr/bin/python " + pycmd + " " + "--use_cpu=True --work_dir=/data --data_dir=/data/data --output_dir=/data/output --log_dir=/data/output\" "
        return cpu_docker_cmd

    def _gen_gpu_docker_cmd(self, pycmd):
        gpu_docker_cmd = "sudo nvidia-docker run -it " + \
            "-v " + self.test_data_path + ":" + "/data/data " + \
            "-v " + self.test_output_path + ":" + "/data/output " + \
            self.user_gpu_image + " " + "/bin/bash -c " + \
-           "\"cd /data && /usr/bin/python " + pycmd + " " + "--use_cpu=False --num_gpus=1 --work_dir=/data --data_dir=/data/data --output_dir=/data/output --log_dir=/data/output/log\""
+           "\"cd /data && /usr/bin/python " + pycmd + " " + "--use_cpu=False --num_gpus=1 --work_dir=/data --data_dir=/data/data --output_dir=/data/output --log_dir=/data/output\""
        return gpu_docker_cmd


39 changes: 39 additions & 0 deletions uaitrain/operation/pack_docker_image/keras_pack_op.py
@@ -0,0 +1,39 @@
# Copyright 2017 The UAI-SDK Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

from uaitrain.operation.pack_docker_image.base_pack_op import BaseUAITrainDockerImagePackOp

class KerasUAITrainDockerImagePackOp(BaseUAITrainDockerImagePackOp):
    """Docker image pack op for UAI Train Keras jobs."""
    def __init__(self, parser):
        super(KerasUAITrainDockerImagePackOp, self).__init__(parser)
        self.ai_arch = "keras"

    def _parse_args(self, args):
        super(KerasUAITrainDockerImagePackOp, self)._parse_args(args)

    def _add_args(self):
        super(KerasUAITrainDockerImagePackOp, self)._add_args()

    def _gen_gpu_docker_cmd(self, pycmd):
        gpu_docker_cmd = "sudo nvidia-docker run -it " + \
            "-v " + self.test_data_path + ":" + "/data/data " + \
            "-v " + self.test_output_path + ":" + "/data/output " + \
            self.user_gpu_image + " " + "/bin/bash -c " + \
            "\"cd /data && /usr/bin/python " + pycmd + " " + "--num_gpus=1 --work_dir=/data --data_dir=/data/data --output_dir=/data/output --log_dir=/data/output\""
        return gpu_docker_cmd



2 changes: 1 addition & 1 deletion uaitrain/operation/pack_docker_image/tf_pack_op.py
@@ -26,6 +26,6 @@ def _gen_gpu_docker_cmd(self, pycmd):
            "-v " + self.test_data_path + ":" + "/data/data " + \
            "-v " + self.test_output_path + ":" + "/data/output " + \
            self.user_gpu_image + " " + "/bin/bash -c " + \
-           "\"cd /data && /usr/bin/python " + pycmd + " " + "--num_gpus=1 --work_dir=/data --data_dir=/data/data --output_dir=/data/output --log_dir=/data/output/log\""
+           "\"cd /data && /usr/bin/python " + pycmd + " " + "--num_gpus=1 --work_dir=/data --data_dir=/data/data --output_dir=/data/output --log_dir=/data/output\""
        return gpu_docker_cmd
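
All of the pack ops above launch the training script with --work_dir, --data_dir, --output_dir, --log_dir (and --num_gpus), and on the training side mnist_cnn.py reads these via uaitrain.arch.tensorflow.uflag. A minimal sketch of what such a flag module presumably declares; the flag names come from the docker commands in this diff, while the default values are assumptions:

# Sketch of a uflag-style flag module; names mirror the arguments passed by
# the docker commands above, default values are assumptions for illustration.
import tensorflow as tf

flags = tf.app.flags
flags.DEFINE_string("work_dir", "/data", "Work directory inside the container")
flags.DEFINE_string("data_dir", "/data/data", "Input data directory")
flags.DEFINE_string("output_dir", "/data/output", "Output directory")
flags.DEFINE_string("log_dir", "/data/output", "TensorBoard log directory")
flags.DEFINE_integer("num_gpus", 0, "Number of GPUs used in training")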

36 changes: 36 additions & 0 deletions uaitrain_tool/keras/keras_tool.py
@@ -0,0 +1,36 @@
# Copyright 2017 The UAI-SDK Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

import sys
import os
import argparse

from uaitrain.operation.pack_docker_image.keras_pack_op import KerasUAITrainDockerImagePackOp

if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='AI Keras Arch Deployer',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    subparsers = parser.add_subparsers(dest='commands', help='commands')

    pack_op = KerasUAITrainDockerImagePackOp(subparsers)
    cmd_args = vars(parser.parse_args())

    if cmd_args['commands'] == 'pack':
        pack_op.cmd_run(cmd_args)
    else:
        print("The UAI Train deploy tool only supports packing Docker images for now")
