Add Tensorflow Lite Demo

The demo shows how to run unmodified TensorFlow Lite on Occlum.
This commit is contained in:
LI Qing 2019-08-19 14:44:45 +08:00 committed by Tate, Hongliang Tian
parent 24116c426f
commit ba7db98e49
7 changed files with 258 additions and 3 deletions

7
demo/tensorflow_lite/.gitignore vendored Normal file

@ -0,0 +1,7 @@
tensorflow_src/
deps/
models/
testdata/
label_image
benchmark_model
minimal

@ -0,0 +1,27 @@
# Use TensorFlow Lite in SGX with Occlum
Step 1: Download TensorFlow, build TensorFlow Lite, and download the models
```
./download_and_build_tflite.sh
```
When completed, the resulting TensorFlow source tree can be found in the `tensorflow_src` directory, and the TensorFlow Lite model can be found in the `models` directory.
Step 2.1: To run TensorFlow Lite inference demo in Occlum, run
```
./run_tflite_in_occlum.sh demo
```
Step 2.2: To run TensorFlow Lite inference benchmark in Occlum, run
```
./run_tflite_in_occlum.sh benchmark
```
Step 3.1: To run TensorFlow Lite inference demo in Linux, run
```
./run_tflite_in_linux.sh demo
```
Step 3.2: To run TensorFlow Lite inference benchmark in Linux, run
```
./run_tflite_in_linux.sh benchmark
```

@ -0,0 +1,29 @@
#!/bin/bash
# Download and build TensorFlow Lite (plus its zlib dependency) with the
# Occlum musl toolchain, then fetch the MobileNet model and labels used by
# the label_image demo and the benchmark.
#
# Fixes vs. original:
#   - `set -o pipefail`: `set -e` alone only checks the last command of a
#     pipeline, so a failed `curl | tar` could previously go unnoticed.
#   - `curl -fL`: fail on HTTP errors (instead of piping an error page into
#     tar) and follow redirects.
#   - `mkdir -p models`: keeps the script re-runnable if the dir exists.
set -e -o pipefail

SRC_DIR="tensorflow_src"

# 1. Install dependent zlib
# Built with occlum-gcc/occlum-g++ so the resulting library is musl-based
# and loadable inside the Occlum enclave.
mkdir -p deps/zlib
pushd deps/zlib
git clone https://github.com/madler/zlib .
CC=occlum-gcc CXX=occlum-g++ ./configure --prefix=/usr/local/occlum/x86_64-linux-musl
make
sudo make install
popd

# 2. Build tensorflow lite and the demo program
mkdir -p $SRC_DIR
pushd $SRC_DIR
git clone https://github.com/tensorflow/tensorflow .
git checkout tags/v1.15.0-rc0 -b v1.15.0-rc0
# The patch switches the TFLite Makefile to the Occlum toolchain and adds
# the label_image example to the build.
git apply ../patch/fix-tflite-Makefile-v1.15.0-rc0.diff
./tensorflow/lite/tools/make/download_dependencies.sh
make -j 3 -f tensorflow/lite/tools/make/Makefile
popd

# 3. Download tflite model and labels
mkdir -p models
curl -fL https://storage.googleapis.com/download.tensorflow.org/models/mobilenet_v1_2018_02_22/mobilenet_v1_1.0_224.tgz | tar xzv -C ./models
curl -fL https://storage.googleapis.com/download.tensorflow.org/models/mobilenet_v1_1.0_224_frozen.tgz | tar xzv -C ./models mobilenet_v1_1.0_224/labels.txt
mv ./models/mobilenet_v1_1.0_224/labels.txt ./models/labels.txt
rm -rf ./models/mobilenet_v1_1.0_224

@ -0,0 +1,99 @@
diff --git a/tensorflow/lite/tools/make/Makefile b/tensorflow/lite/tools/make/Makefile
index 73c50d3..3cef7fa 100644
--- a/tensorflow/lite/tools/make/Makefile
+++ b/tensorflow/lite/tools/make/Makefile
@@ -42,7 +42,7 @@ INCLUDES := \
-I$(OBJDIR)
# This is at the end so any globally-installed frameworks like protobuf don't
# override local versions in the source tree.
-INCLUDES += -I/usr/local/include
+INCLUDES += -I/usr/local/occlum/x86_64-linux-musl/include
# These are the default libraries needed, but they can be added to or
# overridden by the platform-specific settings in target makefiles.
@@ -59,7 +59,8 @@ CXXFLAGS := -O3 -DNDEBUG -fPIC
CXXFLAGS += $(EXTRA_CXXFLAGS)
CFLAGS := ${CXXFLAGS}
CXXFLAGS += --std=c++11
-LDOPTS := -L/usr/local/lib
+LDOPTS := -L/usr/local/occlum/x86_64-linux-musl/lib
+LDFLAGS := -pie $(LDOPTS)
ARFLAGS := -r
TARGET_TOOLCHAIN_PREFIX :=
CC_PREFIX :=
@@ -80,6 +81,12 @@ BENCHMARK_BINARY_NAME := benchmark_model
MINIMAL_SRCS := \
tensorflow/lite/examples/minimal/minimal.cc
+# Add label image example.
+LABEL_IMAGE_SRCS := \
+tensorflow/lite/tools/evaluation/utils.cc \
+tensorflow/lite/examples/label_image/label_image.cc \
+tensorflow/lite/examples/label_image/bitmap_helpers.cc
+
# What sources we want to compile, must be kept in sync with the main Bazel
# build files.
@@ -133,7 +140,8 @@ $(wildcard tensorflow/lite/*/*/*test.cc) \
$(wildcard tensorflow/lite/*/*/*/*test.cc) \
$(wildcard tensorflow/lite/kernels/*test_main.cc) \
$(wildcard tensorflow/lite/kernels/*test_util.cc) \
-$(MINIMAL_SRCS)
+$(MINIMAL_SRCS) \
+$(LABEL_IMAGE_SRCS)
BUILD_WITH_MMAP ?= true
ifeq ($(BUILD_TYPE),micro)
@@ -209,6 +217,7 @@ include $(wildcard $(MAKEFILE_DIR)/targets/*_makefile.inc)
ALL_SRCS := \
$(MINIMAL_SRCS) \
+ $(LABEL_IMAGE_SRCS) \
$(PROFILER_SRCS) \
$(PROFILER_SUMMARIZER_SRCS) \
$(TF_LITE_CC_SRCS) \
@@ -225,14 +234,18 @@ LIB_PATH := $(LIBDIR)$(LIB_NAME)
BENCHMARK_LIB := $(LIBDIR)$(BENCHMARK_LIB_NAME)
BENCHMARK_BINARY := $(BINDIR)$(BENCHMARK_BINARY_NAME)
MINIMAL_BINARY := $(BINDIR)minimal
+LABEL_IMAGE_BINARY := $(BINDIR)label_image
-CXX := $(CC_PREFIX)${TARGET_TOOLCHAIN_PREFIX}g++
-CC := $(CC_PREFIX)${TARGET_TOOLCHAIN_PREFIX}gcc
+CXX := occlum-g++
+CC := occlum-gcc
AR := $(CC_PREFIX)${TARGET_TOOLCHAIN_PREFIX}ar
MINIMAL_OBJS := $(addprefix $(OBJDIR), \
$(patsubst %.cc,%.o,$(patsubst %.c,%.o,$(MINIMAL_SRCS))))
+LABEL_IMAGE_OBJS := $(addprefix $(OBJDIR), \
+$(patsubst %.cc,%.o,$(patsubst %.c,%.o,$(LABEL_IMAGE_SRCS))))
+
LIB_OBJS := $(addprefix $(OBJDIR), \
$(patsubst %.cc,%.o,$(patsubst %.c,%.o,$(patsubst %.cpp,%.o,$(TF_LITE_CC_SRCS)))))
@@ -252,7 +265,7 @@ $(OBJDIR)%.o: %.cpp
$(CXX) $(CXXFLAGS) $(INCLUDES) -c $< -o $@
# The target that's compiled if there's no command-line arguments.
-all: $(LIB_PATH) $(MINIMAL_BINARY) $(BENCHMARK_BINARY)
+all: $(LIB_PATH) $(MINIMAL_BINARY) $(LABEL_IMAGE_BINARY) $(BENCHMARK_BINARY)
# The target that's compiled for micro-controllers
micro: $(LIB_PATH)
@@ -276,6 +289,14 @@ $(MINIMAL_BINARY): $(MINIMAL_OBJS) $(LIB_PATH)
minimal: $(MINIMAL_BINARY)
+$(LABEL_IMAGE_BINARY): $(LIB_PATH) $(LABEL_IMAGE_OBJS)
+ @mkdir -p $(dir $@)
+ $(CXX) $(CXXFLAGS) $(INCLUDES) \
+ -o $(LABEL_IMAGE_BINARY) $(LABEL_IMAGE_OBJS) \
+ $(LIBFLAGS) $(LIB_PATH) $(LDFLAGS) $(LIBS)
+
+label_image: $(LABEL_IMAGE_BINARY)
+
$(BENCHMARK_LIB) : $(LIB_PATH) $(BENCHMARK_OBJS)
@mkdir -p $(dir $@)
$(AR) $(ARFLAGS) $(BENCHMARK_LIB) $(LIB_OBJS) $(BENCHMARK_OBJS)

@ -0,0 +1,41 @@
#!/bin/bash
# Run the TensorFlow Lite demo or benchmark directly on Linux (no enclave).
#
# Usage: ./run_tflite_in_linux.sh demo|benchmark
#
# The binaries are linked against the Occlum musl libraries, so that lib
# directory must be on the loader path.
export LD_LIBRARY_PATH=/usr/local/occlum/x86_64-linux-musl/lib:$LD_LIBRARY_PATH
set -e

show_usage() {
    echo
    echo "Usage: $0 demo/benchmark"
    echo
}

# Copy the freshly built TFLite binaries and the test images next to this
# script so the demo/benchmark can reference them with relative paths.
copy_files() {
    cp -f tensorflow_src/tensorflow/lite/tools/make/gen/linux_x86_64/bin/* .
    cp -rf tensorflow_src/tensorflow/lite/examples/label_image/testdata .
}

# Classify the sample image with label_image using the MobileNet model.
run_demo() {
    copy_files
    ./label_image \
        --tflite_model ./models/mobilenet_v1_1.0_224.tflite \
        --labels ./models/labels.txt \
        --image ./testdata/grace_hopper.bmp
}

# Measure MobileNet inference latency with the benchmark tool.
run_benchmark() {
    copy_files
    ./benchmark_model \
        --graph=./models/mobilenet_v1_1.0_224.tflite \
        --warmup_runs=5
}

bin=$1
case "$bin" in
    demo)
        run_demo
        ;;
    benchmark)
        run_benchmark
        ;;
    *)
        # Fix: exit non-zero on a missing/unknown argument so callers can
        # detect misuse (the script previously exited 0 here).
        show_usage
        exit 1
esac

@ -0,0 +1,52 @@
#!/bin/bash
# Run the TensorFlow Lite demo or benchmark inside an Occlum SGX enclave.
#
# Usage: ./run_tflite_in_occlum.sh demo|benchmark
set -e

show_usage() {
    echo
    echo "Usage: $0 demo/benchmark"
    echo
}

# Create a fresh Occlum workspace (removing any previous one).
# NOTE: this leaves the shell cd'ed into occlum_workspace; build_occlum and
# the `occlum run` commands below rely on that.
init_workspace() {
    rm -rf occlum_workspace
    mkdir occlum_workspace
    cd occlum_workspace
    occlum init
}

# Populate the enclave image with the TFLite binaries, the musl zlib, the
# model files and the test images, then build the enclave.
build_occlum() {
    cp ../tensorflow_src/tensorflow/lite/tools/make/gen/linux_x86_64/bin/* image/bin
    cp /usr/local/occlum/x86_64-linux-musl/lib/libz.so.1 image/lib
    cp -r ../models image
    cp -r ../tensorflow_src/tensorflow/lite/examples/label_image/testdata image
    occlum build
}

# Classify the sample image with label_image inside the enclave.
run_demo() {
    init_workspace
    build_occlum
    occlum run /bin/label_image \
        --tflite_model ./models/mobilenet_v1_1.0_224.tflite \
        --labels ./models/labels.txt \
        --image ./testdata/grace_hopper.bmp
}

# Measure MobileNet inference latency inside the enclave.
run_benchmark() {
    init_workspace
    build_occlum
    occlum run /bin/benchmark_model \
        --graph=./models/mobilenet_v1_1.0_224.tflite \
        --warmup_runs=5
}

bin=$1
case "$bin" in
    demo)
        run_demo
        ;;
    benchmark)
        run_benchmark
        ;;
    *)
        # Fix: exit non-zero on a missing/unknown argument so callers can
        # detect misuse (the script previously exited 0 here), consistent
        # with run_tflite_in_linux.sh.
        show_usage
        exit 1
esac

@ -1,11 +1,11 @@
{
"vm": {
"user_space_size": "128MB"
"user_space_size": "256MB"
},
"process": {
"default_stack_size": "4MB",
"default_heap_size": "16MB",
"default_mmap_size": "32MB"
"default_heap_size": "32MB",
"default_mmap_size": "80MB"
},
"env": [
"OCCLUM=yes"