- Mark as New
- Bookmark
- Subscribe
- Mute
- Subscribe to RSS Feed
- Permalink
- Report Inappropriate Content
Hi everyone, I'm having some issues with the NCS tools. My network compiles, but if I try to check it I get this error:
mvNCCheck v02.00, Copyright @ Movidius Ltd 2016
USB: Transferring Data...
[Error 25] Myriad Error: "Softmax axis parameter = NULL".
Here is my prototxt, in the hope that it helps:
# Network definition for "UNIPINET".
name: "UNIPINET"
# Single input blob "data": N=1, C=1, H=63, W=13.
input:"data"
input_shape{
dim: 1
dim: 1
dim: 63
dim: 13
}
# Stage 1 spatial filtering: 15x15 convolution, then a Deconvolution and a
# Crop that trims the result back along the last (width) axis.
layer {
name: "conv1/dw"
type: "Convolution"
bottom: "data"
top: "conv1/dw"
convolution_param {
num_output: 1
bias_term: false
kernel_h: 15
kernel_w: 15
# NOTE(review): only pad_w is set (no pad_h), so padding is asymmetric
# between height and width — confirm this is intentional.
pad_w: 12
group: 1
#engine: CAFFE
stride: 1
weight_filler {
type: "constant"
value: 0
}
}
}
layer{
name: "Deconv1"
type: "Deconvolution"
bottom: "conv1/dw"
top: "Deconv1"
convolution_param {
num_output: 1
bias_term: false
kernel_size: 15
stride: 1
pad_w:13
pad_h:7
}
}
# Crop conv1/dw to the spatial size of Deconv1 along the last axis.
layer {
name: "conv1/dw/cropped"
type: "Crop"
bottom: "conv1/dw"
bottom: "Deconv1"
top: "conv1/dw/cropped"
crop_param {
# NOTE(review): axis: -1 means "last axis" — verify the NCSDK parser
# accepts a negative Crop axis.
axis: -1
offset: 12
}
}
# Stage 1 pointwise (1x1) convolution expanding to 16 channels,
# followed by BatchNorm, Scale (with learned bias), and ReLU.
layer {
name: "conv1/sep"
type: "Convolution"
bottom: "conv1/dw/cropped"
top: "conv1/sep"
convolution_param {
num_output: 16
bias_term: true
pad: 0
kernel_size: 1
stride: 1
weight_filler {
type: "msra"
}
}
}
layer {
name: "conv1/sep/bn"
type: "BatchNorm"
bottom: "conv1/sep"
top: "conv1/sep/bn"
}
layer {
name: "conv1/sep/bn/scale"
type: "Scale"
bottom: "conv1/sep/bn"
# In-place on conv1/sep/bn.
top: "conv1/sep/bn"
scale_param {
filler {
value: 1
}
bias_term: true
bias_filler {
value: 0
}
}
}
layer {
name: "relu1/sep"
type: "ReLU"
bottom: "conv1/sep/bn"
top: "conv1/sep/rl"
}
# Stage 2 depthwise convolution: 10x10 kernel with group == num_output (16),
# i.e. one filter per channel.
# NOTE(review): per Intel's reply in this thread, the NCSDK at this time only
# supports 3x3 depthwise (group > 1) convolutions, so this 10x10 layer is
# likely rejected by the toolkit.
layer {
name: "conv2/dw"
type: "Convolution"
bottom: "conv1/sep/rl"
top: "conv2/dw"
convolution_param {
num_output: 16
bias_term: false
# NOTE(review): only pad_w is set (no pad_h) — confirm the asymmetric
# padding is intentional.
pad_w: 7
kernel_h: 10
kernel_w: 10
group: 16
#engine: CAFFE
stride: 1
weight_filler {
type: "msra"
}
}
}
layer{
name: "Deconv2"
type: "Deconvolution"
bottom: "conv2/dw"
top: "Deconv2"
convolution_param {
num_output: 16
bias_term: false
kernel_size: 10
stride: 1
pad_w:8
pad_h:4
}
}
# Crop conv2/dw to the spatial size of Deconv2 along the last axis.
layer {
name: "conv2/dw/cropped"
type: "Crop"
bottom: "conv2/dw"
bottom: "Deconv2"
top: "conv2/dw/cropped"
crop_param {
axis: -1
offset: 7
}
}
# Stage 2 pointwise (1x1) convolution expanding 16 -> 64 channels,
# followed by in-place BatchNorm, Scale, and ReLU.
layer {
name: "conv2/sep"
type: "Convolution"
bottom: "conv2/dw/cropped"
top: "conv2/sep"
convolution_param {
num_output: 64
bias_term: true
pad: 0
kernel_size: 1
stride: 1
# NOTE(review): constant-zero weights here (vs. "msra" in conv1/sep and
# conv3/sep) — confirm this initializer is intentional.
weight_filler {
type: "constant"
value: 0
}
}
}
layer {
name: "conv2/sep/bn"
type: "BatchNorm"
bottom: "conv2/sep"
top: "conv2/sep"
}
layer {
name: "conv2/sep/bn/scale"
type: "Scale"
bottom: "conv2/sep"
top: "conv2/sep"
scale_param {
filler {
value: 1
}
bias_term: true
bias_filler {
value: 0
}
}
}
layer {
name: "relu2/sep"
type: "ReLU"
bottom: "conv2/sep"
top: "conv2/sep"
}
# Stage 3 depthwise convolution: 5x5 kernel with group == num_output (64).
# NOTE(review): per Intel's reply in this thread the NCSDK only supports 3x3
# depthwise (group > 1) convolutions, so this 5x5 layer may also be rejected.
layer {
name: "conv3/dw"
type: "Convolution"
bottom: "conv2/sep"
top: "conv3/dw"
convolution_param {
num_output: 64
bias_term: false
# NOTE(review): only pad_w is set (no pad_h) — confirm the asymmetric
# padding is intentional.
pad_w: 2
kernel_h: 5
kernel_w: 5
group: 64
#engine: CAFFE
stride: 1
weight_filler {
type: "constant"
value: 0
}
}
}
layer{
name: "Deconv3"
type: "Deconvolution"
bottom: "conv3/dw"
top: "Deconv3"
convolution_param {
num_output: 64
bias_term: false
kernel_size: 5
stride: 1
pad_w:3
pad_h:2
}
}
# Crop conv3/dw to the spatial size of Deconv3 along the last axis.
layer {
name: "conv3/dw/cropped"
type: "Crop"
bottom: "conv3/dw"
bottom: "Deconv3"
top: "conv3/dw/cropped"
crop_param {
axis: -1
offset: 2
}
}
# Stage 3 pointwise (1x1) convolution expanding 64 -> 128 channels,
# followed by in-place BatchNorm, Scale, and ReLU.
layer {
name: "conv3/sep"
type: "Convolution"
bottom: "conv3/dw/cropped"
top: "conv3/sep"
convolution_param {
num_output: 128
bias_term: true
pad: 0
kernel_size: 1
stride: 1
weight_filler {
type: "msra"
}
}
}
layer {
name: "conv3/sep/bn"
type: "BatchNorm"
bottom: "conv3/sep"
top: "conv3/sep"
}
layer {
name: "conv3/sep/bn/scale"
type: "Scale"
bottom: "conv3/sep"
top: "conv3/sep"
scale_param {
filler {
value: 1
}
bias_term: true
bias_filler {
value: 0
}
}
}
layer {
name: "relu3/sep"
type: "ReLU"
bottom: "conv3/sep"
top: "conv3/sep"
}
# Classifier head: global average pooling, then a fully-connected layer with
# 12 outputs, followed by in-place BatchNorm and Scale.
layer {
name: "avg_pool"
type: "Pooling"
bottom: "conv3/sep"
top: "avg_pool"
pooling_param {
pool: AVE
global_pooling: true
}
}
layer {
name: "fc"
type: "InnerProduct"
bottom: "avg_pool"
top: "fc"
inner_product_param {
num_output: 12
weight_filler {
type: "msra"
}
bias_filler {
type: "constant"
value: 0
}
}
}
# NOTE(review): BatchNorm/Scale after the final FC and before Softmax is
# unusual — confirm this is intentional.
layer {
name: "fc/bn"
type: "BatchNorm"
bottom: "fc"
top: "fc"
}
layer {
name: "fc/bn/scale"
type: "Scale"
bottom: "fc"
top: "fc"
scale_param {
filler {
value: 1
}
bias_term: true
bias_filler {
value: 0
}
}
}
# Final Softmax over the class scores.
layer {
name: "output"
type: "Softmax"
bottom: "fc"
top: "output"
softmax_param {
# Explicitly set the softmax axis to the channel axis. mvNCCheck reports
# '[Error 25] Myriad Error: "Softmax axis parameter = NULL"' when the
# axis is left unspecified in the prototxt, so spell it out here.
axis: 1
}
}
- Tags:
- Engine
Link Copied
- Mark as New
- Bookmark
- Subscribe
- Mute
- Subscribe to RSS Feed
- Permalink
- Report Inappropriate Content
@Ryose I'm not sure if this is the cause of your issue, but while examining your prototxt file I noticed that in layer conv2/dw you have a depthwise convolution of 10x10 and currently we only have support for depth-wise convolutions that are 3x3 (i.e. SSD Mobilenet Caffe).
- Mark as New
- Bookmark
- Subscribe
- Mute
- Subscribe to RSS Feed
- Permalink
- Report Inappropriate Content
@Tome_at_Intel So if I specify the group attribute of a Caffe layer, I can only use 3x3 convolutions — is that right?
- Mark as New
- Bookmark
- Subscribe
- Mute
- Subscribe to RSS Feed
- Permalink
- Report Inappropriate Content
@Ryose Yes, to be more specific, if the group parameter is set to more than 1, you can only use 3x3 convolutions. Please let me know if you run into any other issues.
- Subscribe to RSS Feed
- Mark Topic as New
- Mark Topic as Read
- Float this Topic for Current User
- Bookmark
- Subscribe
- Printer Friendly Page