Skip to content

Commit

Permalink
update caffe case (huaweicloud#1709)
Browse files Browse the repository at this point in the history
* Delete train.py

* Delete lenet_train_test.prototxt

* Delete lenet_solver.prototxt

* Create README.md

* Add files via upload

* Delete README.md

* Add files via upload
  • Loading branch information
chaoxu1203 committed Feb 11, 2020
1 parent 9dce718 commit d1e8dca
Show file tree
Hide file tree
Showing 7 changed files with 323 additions and 109 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{"apis": [{"url": "/", "request": {"Content-type": "multipart/form-data", "data": {"type": "object", "properties": {"images": {"type": "file"}}}}, "protocol": "http", "method": "post", "response": {"Content-type": "multipart/form-data", "data": {"required": ["predicted_label", "scores"], "type": "object", "properties": {"predicted_label": {"type": "string"}, "scores": {"items": {"minItems": 2, "items": [{"type": "string"}, {"type": "number"}], "type": "array", "maxItems": 2}, "type": "array"}}}}}], "service_name": "builtin_algorithms", "model_algorithm": "image_classification", "metrics": {"f1": 1.0, "accuracy": 1.0, "precision": 1.0, "recall": 1.0}, "tunable": false, "model_source": "algos", "model_type": "Caffe"}
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
import caffe
from model_service.caffe_model_service import CaffeBaseService
# MNIST class index -> display label; digits map to themselves.
LABELS = {str(digit): str(digit) for digit in range(10)}

class ResnetService(CaffeBaseService):
    """Caffe model-serving wrapper for MNIST digit classification.

    Receives an uploaded image via the service framework, feeds it through
    the network's ``data`` blob and returns the predicted label plus the
    top-k class scores read from the ``prob`` blob.
    """

    def __init__(self, model_name, model_path):
        """Load the model via the base service and set up preprocessing.

        Args:
            model_name: name passed through to CaffeBaseService.
            model_path: path to the model files, passed through unchanged.
        """
        super(ResnetService, self).__init__(model_name, model_path)
        # caffe.io loads images as (H, W, C); the net expects (C, H, W),
        # hence the transpose configured on the input transformer.
        transformer = caffe.io.Transformer({'data': self.net.blobs['data'].data.shape})
        transformer.set_transpose('data', (2, 0, 1))
        self.transformer = transformer
        self.num_classes = len(LABELS)

    def _preprocess(self, data):
        """Copy the uploaded image(s) into the network's input blob.

        Args:
            data: nested mapping from the serving framework; the leaf values
                are file-like image contents (keys are ignored here).
                NOTE(review): if several files are uploaded, each overwrites
                the input blob and only the last one is scored — confirm the
                framework sends a single image per request.
        """
        for _, v in data.items():
            for _, file_content in v.items():
                # color=False: MNIST images are single-channel grayscale.
                im = caffe.io.load_image(file_content, color=False)
                self.net.blobs['data'].data[...] = self.transformer.preprocess('data', im)
        return

    def _postprocess(self, data):
        """Build the response from the softmax output.

        Returns:
            dict with ``predicted_label`` (top-1 class) and ``scores``, a
            list of [label, probability] pairs for the top-k classes.
        """
        # Use a distinct local name: the framework's `data` argument is not
        # what we score — the probabilities live in the 'prob' output blob.
        probs = self.net.blobs['prob'].data[0]
        # Report at most the top 5 classes; fewer when the label set is small.
        end_idx = -6 if self.num_classes >= 5 else -self.num_classes - 1
        top_k = probs.argsort()[-1:end_idx:-1]
        return {
            "predicted_label":
            LABELS[str(top_k[0])],
            "scores":
            [[LABELS[str(idx)], float(probs[idx])] for idx in top_k]
        }
Original file line number Diff line number Diff line change
@@ -0,0 +1,139 @@
# LeNet deploy network for MNIST digit classification (inference only).
# Input: a single 1x1x28x28 grayscale image; output: 10-way softmax ("prob").
name: "LeNet"
# Input layer: one 28x28 grayscale image per forward pass.
# scale 0.00390625 = 1/255, normalizing pixel values to [0, 1].
layer {
  name: "mnist"
  type: "Input"
  top: "data"
  transform_param {
    scale: 0.00390625
  }
  input_param {
    shape {
      dim: 1
      dim: 1
      dim: 28
      dim: 28
    }
  }
}
# First convolution: 20 feature maps, 5x5 kernel, stride 1.
# lr_mult 1/2: bias learns at twice the weight rate (standard Caffe practice).
layer {
  name: "conv1"
  type: "Convolution"
  bottom: "data"
  top: "conv1"
  param {
    lr_mult: 1
  }
  param {
    lr_mult: 2
  }
  convolution_param {
    num_output: 20
    kernel_size: 5
    stride: 1
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
  }
}
# 2x2 max pooling, stride 2: halves spatial resolution.
layer {
  name: "pool1"
  type: "Pooling"
  bottom: "conv1"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
# Second convolution: 50 feature maps, 5x5 kernel.
layer {
  name: "conv2"
  type: "Convolution"
  bottom: "pool1"
  top: "conv2"
  param {
    lr_mult: 1
  }
  param {
    lr_mult: 2
  }
  convolution_param {
    num_output: 50
    kernel_size: 5
    stride: 1
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
  }
}
# Second 2x2 max pooling.
layer {
  name: "pool2"
  type: "Pooling"
  bottom: "conv2"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
# Fully connected layer with 500 hidden units.
layer {
  name: "ip1"
  type: "InnerProduct"
  bottom: "pool2"
  top: "ip1"
  param {
    lr_mult: 1
  }
  param {
    lr_mult: 2
  }
  inner_product_param {
    num_output: 500
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
  }
}
# In-place ReLU activation on ip1.
layer {
  name: "relu1"
  type: "ReLU"
  bottom: "ip1"
  top: "ip1"
}
# Final fully connected layer: one output per digit class (10).
layer {
  name: "ip2"
  type: "InnerProduct"
  bottom: "ip1"
  top: "ip2"
  param {
    lr_mult: 1
  }
  param {
    lr_mult: 2
  }
  inner_product_param {
    num_output: 10
    weight_filler {
      type: "xavier"
    }
    bias_filler {
      type: "constant"
    }
  }
}
# Softmax (not SoftmaxWithLoss): deploy net emits class probabilities,
# read by the service code from the "prob" blob.
layer {
  name: "prob"
  type: "Softmax"
  bottom: "ip2"
  top: "prob"
}
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
# The train/test net protocol buffer definition
net: "/home/work/user-job-dir/codes/lenet_train_test.prototxt"
net: "/cache/lenet_train_test.prototxt"
# test_iter specifies how many forward passes the test should carry out.
# In the case of MNIST, we have test batch size 100 and 100 test iterations,
# covering the full 10,000 testing images.
Expand All @@ -20,6 +20,6 @@ display: 100
max_iter: 10000
# snapshot intermediate results
snapshot: 500
snapshot_prefix: "/home/work/lenet_mnist/mnist_lenet"
snapshot_prefix: "/cache/lenet_mnist/mnist_lenet"
# solver mode: CPU or GPU
solver_mode: GPU
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ layer {
scale: 0.00390625
}
data_param {
source: "/home/work/dataset/mnist_train_lmdb"
source: "/cache/dataset/mnist_train_lmdb"
batch_size: 1024
backend: LMDB
}
Expand All @@ -28,7 +28,7 @@ layer {
scale: 0.00390625
}
data_param {
source: "/home/work/dataset/mnist_test_lmdb"
source: "/cache/dataset/mnist_test_lmdb"
batch_size: 512
backend: LMDB
}
Expand Down

This file was deleted.

Loading

0 comments on commit d1e8dca

Please sign in to comment.