1

我创建了带有三个 top(输出 blob)的 HDF5 数据文件,并将它们用作我的简单多任务 Caffe 网络的输入进行实验。我总是在某个时刻遇到同样的错误(有时在一开始,有时在进入测试阶段(test interval)之后不久)。我曾尝试减小 batch size(最低到 4),也尝试过每个 HDF5 块一次只写入 100 张图像(从 4000 逐步降下来),但错误始终相同。然而,当训练和评估的 HDF5 列表文件(txt)中只包含 2-3 个 HDF5 文件时,它却可以工作(至少能正常运行)。这是我的 prototxt:

name: "AlexNet"
# Training input: every .h5 file listed in train_h5_list.txt must contain
# three datasets named "data", "ph" and "sm" — one per top below — all with
# the same number of rows. A file with zero-row datasets makes this layer
# abort ("Check failed: data_").
layer {
  name: "data"
  type: "HDF5Data"
  top: "data"
  top: "ph"
  top: "sm"
   hdf5_data_param {
      source: "/home/caffe/examples/det/train_h5_list.txt"
      batch_size: 128
      }
  include {
    phase: TRAIN
  }

}

# TEST-phase counterpart of the layer above: same tops, different list file.
layer {
  name: "data"
  type: "HDF5Data"
  top: "data"
  top: "ph"
  top: "sm"
   hdf5_data_param {
      source: "/home/caffe/examples/det/eval_h5_list.txt"
      batch_size: 128
      }
 include {
    phase: TEST
  }
}

# Stage 1: 96 11x11 filters, stride 4, then ReLU, LRN and 3x3/2 max-pool.
layer {
  name: "conv1"
  type: "Convolution"
  bottom: "data"
  top: "conv1"
  # lr_mult 1/2 and decay_mult 1/0: weights learn at base LR with weight
  # decay; biases learn at twice the LR with no decay (AlexNet convention).
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 96
    kernel_size: 11
    stride: 4
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu1"
  type: "ReLU"
  bottom: "conv1"
  top: "conv1"
}
layer {
  name: "norm1"
  type: "LRN"
  bottom: "conv1"
  top: "norm1"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}
layer {
  name: "pool1"
  type: "Pooling"
  bottom: "norm1"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
# Stage 2: 256 5x5 filters (pad 2, 2 groups), then ReLU, LRN and 3x3/2 pool.
layer {
  name: "conv2"
  type: "Convolution"
  bottom: "pool1"
  top: "conv2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 2
    kernel_size: 5
    # group: 2 splits channels into two independent halves (historical
    # two-GPU AlexNet layout).
    group: 2
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0.1
    }
  }
}
layer {
  name: "relu2"
  type: "ReLU"
  bottom: "conv2"
  top: "conv2"
}
layer {
  name: "norm2"
  type: "LRN"
  bottom: "conv2"
  top: "norm2"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}
layer {
  name: "pool2"
  type: "Pooling"
  bottom: "norm2"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
# Stage 3: 384 3x3 filters (pad 1, no grouping), followed by ReLU.
layer {
  name: "conv3"
  type: "Convolution"
  bottom: "pool2"
  top: "conv3"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 384
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu3"
  type: "ReLU"
  bottom: "conv3"
  top: "conv3"
}
# Stage 4: 384 3x3 filters (pad 1, 2 groups), followed by ReLU.
layer {
  name: "conv4"
  type: "Convolution"
  bottom: "conv3"
  top: "conv4"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 384
    pad: 1
    kernel_size: 3
    group: 2
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0.1
    }
  }
}
layer {
  name: "relu4"
  type: "ReLU"
  bottom: "conv4"
  top: "conv4"
}
# Stage 5: 256 3x3 filters (pad 1, 2 groups), ReLU, then the final 3x3/2 pool.
layer {
  name: "conv5"
  type: "Convolution"
  bottom: "conv4"
  top: "conv5"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    group: 2
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0.1
    }
  }
}
layer {
  name: "relu5"
  type: "ReLU"
  bottom: "conv5"
  top: "conv5"
}
layer {
  name: "pool5"
  type: "Pooling"
  bottom: "conv5"
  top: "pool5"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
# fc6: first 4096-d fully-connected layer with ReLU and 50% dropout.
layer {
  name: "fc6"
  type: "InnerProduct"
  bottom: "pool5"
  top: "fc6"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 4096
    weight_filler {
      type: "gaussian"
      std: 0.005
    }
    bias_filler {
      type: "constant"
      value: 0.1
    }
  }
}
layer {
  name: "relu6"
  type: "ReLU"
  bottom: "fc6"
  top: "fc6"
}
layer {
  name: "drop6"
  type: "Dropout"
  bottom: "fc6"
  top: "fc6"
  dropout_param {
    dropout_ratio: 0.5
  }
}
# fc7: second 4096-d fully-connected layer with ReLU and 50% dropout.
layer {
  name: "fc7"
  type: "InnerProduct"
  bottom: "fc6"
  top: "fc7"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 4096
    weight_filler {
      type: "gaussian"
      std: 0.005
    }
    bias_filler {
      type: "constant"
      value: 0.1
    }
  }
}
layer {
  name: "relu7"
  type: "ReLU"
  bottom: "fc7"
  top: "fc7"
}
layer {
  name: "drop7"
  type: "Dropout"
  bottom: "fc7"
  top: "fc7"
  dropout_param {
    dropout_ratio: 0.5
  }
}
# fc8: 1000-d layer kept from the original ImageNet AlexNet.
# NOTE(review): both task heads (phfc8, smfc8) branch off this 1000-d output
# rather than off fc7 — confirm that the extra 1000-unit bottleneck is
# intentional for this multi-task setup.
layer {
  name: "fc8"
  type: "InnerProduct"
  bottom: "fc7"
  top: "fc8"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 1000
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}




# Task head for the 7-way "ph" label, applied on top of fc8.
# Same name/bottom/top as before; adds the lr_mult/decay_mult params and
# fillers used by every other learned layer — without an explicit
# weight_filler Caffe initializes the weights to constant 0.
layer {
  bottom: "fc8"
  top: "phfc8"
  name: "phfc8"
  type: "InnerProduct"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 7
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}


# Task head for the binary "sm" label, applied on top of fc8.
# Same name/bottom/top as before; adds the lr_mult/decay_mult params and
# fillers used by every other learned layer — without an explicit
# weight_filler Caffe initializes the weights to constant 0.
layer {
  bottom: "fc8"
  top: "smfc8"
  name: "smfc8"
  type: "InnerProduct"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 2
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}


# Softmax cross-entropy loss for the "ph" task; labels come from the "ph"
# dataset of the HDF5 input (loss weight defaults to 1).
layer {
  name: "phloss"
  type: "SoftmaxWithLoss"
  bottom: "phfc8"
  bottom: "ph"
  top: "phloss"
}

# Softmax cross-entropy loss for the "sm" task; labels come from the "sm"
# dataset of the HDF5 input (loss weight defaults to 1).
layer {
  name: "smloss"
  type: "SoftmaxWithLoss"
  bottom: "smfc8"
  bottom: "sm"
  top: "smloss"
}

我收到此错误:

 blob.cpp:133] Check failed: data_ 
*** Check failure stack trace: ***
    @     0x7fd411ac85cd  google::LogMessage::Fail()
    @     0x7fd411aca433  google::LogMessage::SendToLog()
    @     0x7fd411ac815b  google::LogMessage::Flush()
    @     0x7fd411acae1e  google::LogMessageFatal::~LogMessageFatal()
    @     0x7fd41226518b  caffe::Blob<>::mutable_cpu_data()
    @     0x7fd4122824a1  caffe::hdf5_load_nd_dataset<>()
    @     0x7fd412158f73  caffe::HDF5DataLayer<>::LoadHDF5FileData()
    @     0x7fd4121571fd  caffe::HDF5DataLayer<>::Next()
    @     0x7fd4122a9a93  caffe::HDF5DataLayer<>::Forward_gpu()
    @     0x7fd4120dda21  caffe::Net<>::ForwardFromTo()
    @     0x7fd4120ddb27  caffe::Net<>::Forward()
    @     0x7fd41225c332  caffe::Solver<>::Test()
    @     0x7fd41225cd4e  caffe::Solver<>::TestAll()
    @     0x7fd4122602c7  caffe::Solver<>::Step()
    @     0x7fd41226058a  caffe::Solver<>::Solve()
    @           0x40aba4  train()
    @           0x407390  main
    @     0x7fd410a38830  __libc_start_main
    @           0x407bb9  _start
    @              (nil)  (unknown)
Aborted (core dumped)

这是我对图像进行预处理并写入 HDF5 文件的代码。

import h5py, os
import sys
sys.path.append("/home/caffe/python")
import caffe
import numpy as np
import cv2

# Side length (pixels) of the square images written to HDF5; must match the
# input size the net was set up for (the log shows tops of 3 x 224 x 224).
output_side_length = 224

def resize_and_crop_image(img, output_side_length = 224):
        '''Resize an image so its shorter side equals output_side_length,
        then crop out the centered square of that size.

        Args:
            img: H x W x C image array (e.g. as returned by cv2.imread).
            output_side_length: side length in pixels of the square output.

        Returns:
            An output_side_length x output_side_length x C array.

        (Fixes the old docstring, which claimed the function took an image
        *name*, and removes leftover debug prints and dead code after the
        return.)
        '''
        height, width, depth = img.shape
        # Scale the longer dimension proportionally so the shorter one lands
        # exactly on output_side_length.
        new_height = output_side_length
        new_width = output_side_length
        if height > width:
            new_height = int(output_side_length * height / width)
        else:
            new_width = int(output_side_length * width / height)
        # cv2.resize takes its target size in (width, height) order.
        resized_img = cv2.resize(img, (new_width, new_height))
        # Center the crop along whichever axis exceeds the target size.
        height_offset = int((new_height - output_side_length) / 2)
        width_offset = int((new_width - output_side_length) / 2)
        return resized_img[height_offset:height_offset + output_side_length,
                           width_offset:width_offset + output_side_length]

# Read the index file (one "<name> <sm> <ph>" entry per line), preprocess the
# images, and write them out in 100-sample HDF5 chunks plus the list file
# that Caffe's HDF5Data layer consumes.
with open( '/home/caffe/data/Ch/train_random.txt', 'r' ) as T :
    lines = T.readlines()
# If you do not have enough memory split data into
# multiple batches and generate multiple separate h5 files
imgdata = np.zeros( (len(lines), 3, output_side_length, output_side_length), dtype='f4' )
sm = np.zeros( (len(lines),1), dtype='f4' )
ph = np.zeros( (len(lines),1), dtype='f4' )

for i,l in enumerate(lines):
    sp = l.split(' ')
    # Frame names look like "<video>_<frame>"; images live in per-video dirs.
    video = sp[0].split('_')[0]
    impath =  '/home/caffe/data/Ch/images/'+video+'/'+sp[0] +'.jpg'

    img = cv2.imread(impath)
    if img is None:
        # cv2.imread returns None silently on a missing/unreadable file,
        # which would otherwise crash deep inside resize_and_crop_image.
        raise IOError('could not read image: ' + impath)

    cropped_img = resize_and_crop_image(img)

    # Subtract the per-channel mean and scale to roughly [-0.5, 0.5).
    # Input is assumed to already be in the desired channel order and 0-255.
    image_mean = [128, 128, 128]
    proc_img = (cropped_img - image_mean) / 255

    # H x W x C -> C x H x W, the layout Caffe expects.
    imgdata[i] = proc_img.transpose((2,0,1))
    sm[i] = float(sp[1])
    ph[i] = float(sp[2])

# Chunk start indices: 0, 100, 200, ... strictly BELOW len(imgdata).
# BUG FIX: the old bound was len(imgdata) + 100, which always produced one
# extra start index at/past the end of the data and wrote a trailing .h5
# whose datasets have zero rows. Caffe's HDF5DataLayer aborts on such a file
# with "blob.cpp:133 Check failed: data_" — exactly the observed crash, and
# why a short 2-3 file list (that never reached the empty file) "worked".
datalist = range(0, len(imgdata), 100)

with open('train_h5_list.txt','w') as L:
    for i in datalist:
        with h5py.File('train'+str(i)+'.h5','w') as H:
            # Dataset names must match the "top" names in the prototxt.
            H.create_dataset( 'data', data=imgdata[i:i+100] )
            H.create_dataset( 'sm', data=sm[i:i+100] )
            H.create_dataset( 'ph', data=ph[i:i+100] )
            L.write( 'train'+str(i)+'.h5'+'\n' ) # list all h5 files you are going to use

完整输出(训练初始化部分;eval 阶段的输出类似):

I1222 18:54:43.020535 28759 layer_factory.hpp:77] Creating layer data
I1222 18:54:43.020550 28759 net.cpp:84] Creating Layer data
I1222 18:54:43.020555 28759 net.cpp:380] data -> data
I1222 18:54:43.020576 28759 net.cpp:380] data -> ph
I1222 18:54:43.020584 28759 net.cpp:380] data -> sm
I1222 18:54:43.020591 28759 hdf5_data_layer.cpp:80] Loading list of HDF5 filenames from: /home/caffe/examples/det/train_h5_list.txt
I1222 18:54:43.020783 28759 hdf5_data_layer.cpp:94] Number of HDF5 files: 606
I1222 18:54:43.022075 28759 hdf5.cpp:32] Datatype class: H5T_FLOAT
I1222 18:54:48.262107 28759 net.cpp:122] Setting up data
I1222 18:54:48.262153 28759 net.cpp:129] Top shape: 128 3 224 224 (19267584)
I1222 18:54:48.262163 28759 net.cpp:129] Top shape: 128 1 (128)
I1222 18:54:48.262172 28759 net.cpp:129] Top shape: 128 1 (128)
I1222 18:54:48.262181 28759 net.cpp:137] Memory required for data: 77071360
I1222 18:54:48.262193 28759 layer_factory.hpp:77] Creating layer conv1
I1222 18:54:48.262231 28759 net.cpp:84] Creating Layer conv1
I1222 18:54:48.262245 28759 net.cpp:406] conv1 <- data
I1222 18:54:48.262269 28759 net.cpp:380] conv1 -> conv1
I1222 18:54:48.866920 28759 net.cpp:122] Setting up conv1
I1222 18:54:48.866946 28759 net.cpp:129] Top shape: 128 96 54 54 (35831808)
I1222 18:54:48.866950 28759 net.cpp:137] Memory required for data: 220398592
I1222 18:54:48.866971 28759 layer_factory.hpp:77] Creating layer relu1
I1222 18:54:48.866986 28759 net.cpp:84] Creating Layer relu1
I1222 18:54:48.867033 28759 net.cpp:406] relu1 <- conv1
I1222 18:54:48.867040 28759 net.cpp:367] relu1 -> conv1 (in-place)
I1222 18:54:48.867411 28759 net.cpp:122] Setting up relu1
I1222 18:54:48.867421 28759 net.cpp:129] Top shape: 128 96 54 54 (35831808)
I1222 18:54:48.867424 28759 net.cpp:137] Memory required for data: 363725824
I1222 18:54:48.867427 28759 layer_factory.hpp:77] Creating layer norm1
I1222 18:54:48.867434 28759 net.cpp:84] Creating Layer norm1
I1222 18:54:48.867436 28759 net.cpp:406] norm1 <- conv1
I1222 18:54:48.867441 28759 net.cpp:380] norm1 -> norm1
I1222 18:54:48.867823 28759 net.cpp:122] Setting up norm1
I1222 18:54:48.867853 28759 net.cpp:129] Top shape: 128 96 54 54 (35831808)
I1222 18:54:48.867856 28759 net.cpp:137] Memory required for data: 507053056
I1222 18:54:48.867858 28759 layer_factory.hpp:77] Creating layer pool1
I1222 18:54:48.867866 28759 net.cpp:84] Creating Layer pool1
I1222 18:54:48.867868 28759 net.cpp:406] pool1 <- norm1
I1222 18:54:48.867873 28759 net.cpp:380] pool1 -> pool1
I1222 18:54:48.867913 28759 net.cpp:122] Setting up pool1
I1222 18:54:48.867919 28759 net.cpp:129] Top shape: 128 96 27 27 (8957952)
I1222 18:54:48.867923 28759 net.cpp:137] Memory required for data: 542884864
I1222 18:54:48.867926 28759 layer_factory.hpp:77] Creating layer conv2
I1222 18:54:48.867938 28759 net.cpp:84] Creating Layer conv2
I1222 18:54:48.867941 28759 net.cpp:406] conv2 <- pool1
I1222 18:54:48.867946 28759 net.cpp:380] conv2 -> conv2
I1222 18:54:48.875316 28759 net.cpp:122] Setting up conv2
I1222 18:54:48.875347 28759 net.cpp:129] Top shape: 128 256 27 27 (23887872)
I1222 18:54:48.875356 28759 net.cpp:137] Memory required for data: 638436352
I1222 18:54:48.875376 28759 layer_factory.hpp:77] Creating layer relu2
I1222 18:54:48.875416 28759 net.cpp:84] Creating Layer relu2
I1222 18:54:48.875427 28759 net.cpp:406] relu2 <- conv2
I1222 18:54:48.875440 28759 net.cpp:367] relu2 -> conv2 (in-place)
I1222 18:54:48.876116 28759 net.cpp:122] Setting up relu2
I1222 18:54:48.876127 28759 net.cpp:129] Top shape: 128 256 27 27 (23887872)
I1222 18:54:48.876128 28759 net.cpp:137] Memory required for data: 733987840
I1222 18:54:48.876132 28759 layer_factory.hpp:77] Creating layer norm2
I1222 18:54:48.876138 28759 net.cpp:84] Creating Layer norm2
I1222 18:54:48.876142 28759 net.cpp:406] norm2 <- conv2
I1222 18:54:48.876147 28759 net.cpp:380] norm2 -> norm2
I1222 18:54:48.876526 28759 net.cpp:122] Setting up norm2
I1222 18:54:48.876535 28759 net.cpp:129] Top shape: 128 256 27 27 (23887872)
I1222 18:54:48.876538 28759 net.cpp:137] Memory required for data: 829539328
I1222 18:54:48.876539 28759 layer_factory.hpp:77] Creating layer pool2
I1222 18:54:48.876547 28759 net.cpp:84] Creating Layer pool2
I1222 18:54:48.876550 28759 net.cpp:406] pool2 <- norm2
I1222 18:54:48.876554 28759 net.cpp:380] pool2 -> pool2
I1222 18:54:48.876587 28759 net.cpp:122] Setting up pool2
I1222 18:54:48.876593 28759 net.cpp:129] Top shape: 128 256 13 13 (5537792)
I1222 18:54:48.876596 28759 net.cpp:137] Memory required for data: 851690496
I1222 18:54:48.876598 28759 layer_factory.hpp:77] Creating layer conv3
I1222 18:54:48.876608 28759 net.cpp:84] Creating Layer conv3
I1222 18:54:48.876615 28759 net.cpp:406] conv3 <- pool2
I1222 18:54:48.876621 28759 net.cpp:380] conv3 -> conv3
I1222 18:54:48.887678 28759 net.cpp:122] Setting up conv3
I1222 18:54:48.887698 28759 net.cpp:129] Top shape: 128 384 13 13 (8306688)
I1222 18:54:48.887701 28759 net.cpp:137] Memory required for data: 884917248
I1222 18:54:48.887713 28759 layer_factory.hpp:77] Creating layer relu3
I1222 18:54:48.887722 28759 net.cpp:84] Creating Layer relu3
I1222 18:54:48.887724 28759 net.cpp:406] relu3 <- conv3
I1222 18:54:48.887732 28759 net.cpp:367] relu3 -> conv3 (in-place)
I1222 18:54:48.888123 28759 net.cpp:122] Setting up relu3
I1222 18:54:48.888133 28759 net.cpp:129] Top shape: 128 384 13 13 (8306688)
I1222 18:54:48.888134 28759 net.cpp:137] Memory required for data: 918144000
I1222 18:54:48.888137 28759 layer_factory.hpp:77] Creating layer conv4
I1222 18:54:48.888147 28759 net.cpp:84] Creating Layer conv4
I1222 18:54:48.888150 28759 net.cpp:406] conv4 <- conv3
I1222 18:54:48.888154 28759 net.cpp:380] conv4 -> conv4
I1222 18:54:48.897979 28759 net.cpp:122] Setting up conv4
I1222 18:54:48.897995 28759 net.cpp:129] Top shape: 128 384 13 13 (8306688)
I1222 18:54:48.898000 28759 net.cpp:137] Memory required for data: 951370752
I1222 18:54:48.898006 28759 layer_factory.hpp:77] Creating layer relu4
I1222 18:54:48.898015 28759 net.cpp:84] Creating Layer relu4
I1222 18:54:48.898020 28759 net.cpp:406] relu4 <- conv4
I1222 18:54:48.898025 28759 net.cpp:367] relu4 -> conv4 (in-place)
I1222 18:54:48.898628 28759 net.cpp:122] Setting up relu4
I1222 18:54:48.898641 28759 net.cpp:129] Top shape: 128 384 13 13 (8306688)
I1222 18:54:48.898643 28759 net.cpp:137] Memory required for data: 984597504
I1222 18:54:48.898646 28759 layer_factory.hpp:77] Creating layer conv5
I1222 18:54:48.898655 28759 net.cpp:84] Creating Layer conv5
I1222 18:54:48.898658 28759 net.cpp:406] conv5 <- conv4
I1222 18:54:48.898663 28759 net.cpp:380] conv5 -> conv5
I1222 18:54:48.905979 28759 net.cpp:122] Setting up conv5
I1222 18:54:48.905997 28759 net.cpp:129] Top shape: 128 256 13 13 (5537792)
I1222 18:54:48.906002 28759 net.cpp:137] Memory required for data: 1006748672
I1222 18:54:48.906014 28759 layer_factory.hpp:77] Creating layer relu5
I1222 18:54:48.906023 28759 net.cpp:84] Creating Layer relu5
I1222 18:54:48.906028 28759 net.cpp:406] relu5 <- conv5
I1222 18:54:48.906033 28759 net.cpp:367] relu5 -> conv5 (in-place)
I1222 18:54:48.906394 28759 net.cpp:122] Setting up relu5
I1222 18:54:48.906404 28759 net.cpp:129] Top shape: 128 256 13 13 (5537792)
I1222 18:54:48.906405 28759 net.cpp:137] Memory required for data: 1028899840
I1222 18:54:48.906409 28759 layer_factory.hpp:77] Creating layer pool5
I1222 18:54:48.906416 28759 net.cpp:84] Creating Layer pool5
I1222 18:54:48.906419 28759 net.cpp:406] pool5 <- conv5
I1222 18:54:48.906424 28759 net.cpp:380] pool5 -> pool5
I1222 18:54:48.906466 28759 net.cpp:122] Setting up pool5
I1222 18:54:48.906471 28759 net.cpp:129] Top shape: 128 256 6 6 (1179648)
I1222 18:54:48.906474 28759 net.cpp:137] Memory required for data: 1033618432
I1222 18:54:48.906478 28759 layer_factory.hpp:77] Creating layer fc6
I1222 18:54:48.906489 28759 net.cpp:84] Creating Layer fc6
I1222 18:54:48.906492 28759 net.cpp:406] fc6 <- pool5
I1222 18:54:48.906498 28759 net.cpp:380] fc6 -> fc6
I1222 18:54:49.236657 28759 net.cpp:122] Setting up fc6
I1222 18:54:49.236685 28759 net.cpp:129] Top shape: 128 4096 (524288)
I1222 18:54:49.236686 28759 net.cpp:137] Memory required for data: 1035715584
I1222 18:54:49.236694 28759 layer_factory.hpp:77] Creating layer relu6
I1222 18:54:49.236706 28759 net.cpp:84] Creating Layer relu6
I1222 18:54:49.236709 28759 net.cpp:406] relu6 <- fc6
I1222 18:54:49.236716 28759 net.cpp:367] relu6 -> fc6 (in-place)
I1222 18:54:49.237154 28759 net.cpp:122] Setting up relu6
I1222 18:54:49.237164 28759 net.cpp:129] Top shape: 128 4096 (524288)
I1222 18:54:49.237166 28759 net.cpp:137] Memory required for data: 1037812736
I1222 18:54:49.237169 28759 layer_factory.hpp:77] Creating layer drop6
I1222 18:54:49.237175 28759 net.cpp:84] Creating Layer drop6
I1222 18:54:49.237179 28759 net.cpp:406] drop6 <- fc6
I1222 18:54:49.237182 28759 net.cpp:367] drop6 -> fc6 (in-place)
I1222 18:54:49.237212 28759 net.cpp:122] Setting up drop6
I1222 18:54:49.237217 28759 net.cpp:129] Top shape: 128 4096 (524288)
I1222 18:54:49.237221 28759 net.cpp:137] Memory required for data: 1039909888
I1222 18:54:49.237224 28759 layer_factory.hpp:77] Creating layer fc7
I1222 18:54:49.237231 28759 net.cpp:84] Creating Layer fc7
I1222 18:54:49.237234 28759 net.cpp:406] fc7 <- fc6
I1222 18:54:49.237239 28759 net.cpp:380] fc7 -> fc7
I1222 18:54:49.384224 28759 net.cpp:122] Setting up fc7
I1222 18:54:49.384255 28759 net.cpp:129] Top shape: 128 4096 (524288)
I1222 18:54:49.384258 28759 net.cpp:137] Memory required for data: 1042007040
I1222 18:54:49.384266 28759 layer_factory.hpp:77] Creating layer relu7
I1222 18:54:49.384275 28759 net.cpp:84] Creating Layer relu7
I1222 18:54:49.384279 28759 net.cpp:406] relu7 <- fc7
I1222 18:54:49.384284 28759 net.cpp:367] relu7 -> fc7 (in-place)
I1222 18:54:49.384727 28759 net.cpp:122] Setting up relu7
I1222 18:54:49.384737 28759 net.cpp:129] Top shape: 128 4096 (524288)
I1222 18:54:49.384739 28759 net.cpp:137] Memory required for data: 1044104192
I1222 18:54:49.384742 28759 layer_factory.hpp:77] Creating layer drop7
I1222 18:54:49.384747 28759 net.cpp:84] Creating Layer drop7
I1222 18:54:49.384750 28759 net.cpp:406] drop7 <- fc7
I1222 18:54:49.384754 28759 net.cpp:367] drop7 -> fc7 (in-place)
I1222 18:54:49.384804 28759 net.cpp:122] Setting up drop7
I1222 18:54:49.384810 28759 net.cpp:129] Top shape: 128 4096 (524288)
I1222 18:54:49.384814 28759 net.cpp:137] Memory required for data: 1046201344
I1222 18:54:49.384817 28759 layer_factory.hpp:77] Creating layer fc8
I1222 18:54:49.384826 28759 net.cpp:84] Creating Layer fc8
I1222 18:54:49.384830 28759 net.cpp:406] fc8 <- fc7
I1222 18:54:49.384836 28759 net.cpp:380] fc8 -> fc8
I1222 18:54:49.421114 28759 net.cpp:122] Setting up fc8
I1222 18:54:49.421138 28759 net.cpp:129] Top shape: 128 1000 (128000)
I1222 18:54:49.421140 28759 net.cpp:137] Memory required for data: 1046713344
I1222 18:54:49.421149 28759 layer_factory.hpp:77] Creating layer fc8_fc8_0_split
I1222 18:54:49.421160 28759 net.cpp:84] Creating Layer fc8_fc8_0_split
I1222 18:54:49.421166 28759 net.cpp:406] fc8_fc8_0_split <- fc8
I1222 18:54:49.421172 28759 net.cpp:380] fc8_fc8_0_split -> fc8_fc8_0_split_0
I1222 18:54:49.421185 28759 net.cpp:380] fc8_fc8_0_split -> fc8_fc8_0_split_1
I1222 18:54:49.421218 28759 net.cpp:122] Setting up fc8_fc8_0_split
I1222 18:54:49.421227 28759 net.cpp:129] Top shape: 128 1000 (128000)
I1222 18:54:49.421231 28759 net.cpp:129] Top shape: 128 1000 (128000)
I1222 18:54:49.421236 28759 net.cpp:137] Memory required for data: 1047737344
I1222 18:54:49.421238 28759 layer_factory.hpp:77] Creating layer phfc8
I1222 18:54:49.421244 28759 net.cpp:84] Creating Layer phfc8
I1222 18:54:49.421247 28759 net.cpp:406] phfc8 <- fc8_fc8_0_split_0
I1222 18:54:49.421253 28759 net.cpp:380] phfc8 -> phfc8
I1222 18:54:49.422202 28759 net.cpp:122] Setting up phfc8
I1222 18:54:49.422215 28759 net.cpp:129] Top shape: 128 7 (896)
I1222 18:54:49.422219 28759 net.cpp:137] Memory required for data: 1047740928
I1222 18:54:49.422230 28759 layer_factory.hpp:77] Creating layer smfc8
I1222 18:54:49.422236 28759 net.cpp:84] Creating Layer smfc8
I1222 18:54:49.422240 28759 net.cpp:406] smfc8 <- fc8_fc8_0_split_1
I1222 18:54:49.422245 28759 net.cpp:380] smfc8 -> smfc8
I1222 18:54:49.422338 28759 net.cpp:122] Setting up smfc8
I1222 18:54:49.422345 28759 net.cpp:129] Top shape: 128 2 (256)
I1222 18:54:49.422348 28759 net.cpp:137] Memory required for data: 1047741952
I1222 18:54:49.422353 28759 layer_factory.hpp:77] Creating layer phloss
I1222 18:54:49.422359 28759 net.cpp:84] Creating Layer phloss
I1222 18:54:49.422363 28759 net.cpp:406] phloss <- phfc8
I1222 18:54:49.422368 28759 net.cpp:406] phloss <- ph
I1222 18:54:49.422372 28759 net.cpp:380] phloss -> phloss
I1222 18:54:49.422384 28759 layer_factory.hpp:77] Creating layer phloss
I1222 18:54:49.423177 28759 net.cpp:122] Setting up phloss
I1222 18:54:49.423188 28759 net.cpp:129] Top shape: (1)
I1222 18:54:49.423192 28759 net.cpp:132]     with loss weight 1
I1222 18:54:49.423213 28759 net.cpp:137] Memory required for data: 1047741956
I1222 18:54:49.423218 28759 layer_factory.hpp:77] Creating layer smloss
I1222 18:54:49.423224 28759 net.cpp:84] Creating Layer smloss
I1222 18:54:49.423228 28759 net.cpp:406] smloss <- smfc8
I1222 18:54:49.423233 28759 net.cpp:406] smloss <- sm
I1222 18:54:49.423238 28759 net.cpp:380] smloss -> smloss
I1222 18:54:49.423243 28759 layer_factory.hpp:77] Creating layer smloss
I1222 18:54:49.423686 28759 net.cpp:122] Setting up smloss
I1222 18:54:49.423696 28759 net.cpp:129] Top shape: (1)
I1222 18:54:49.423698 28759 net.cpp:132]     with loss weight 1
I1222 18:54:49.423704 28759 net.cpp:137] Memory required for data: 1047741960
I1222 18:54:49.423707 28759 net.cpp:198] smloss needs backward computation.
I1222 18:54:49.423715 28759 net.cpp:198] phloss needs backward computation.
I1222 18:54:49.423719 28759 net.cpp:198] smfc8 needs backward computation.
I1222 18:54:49.423723 28759 net.cpp:198] phfc8 needs backward computation.
I1222 18:54:49.423727 28759 net.cpp:198] fc8_fc8_0_split needs backward computation.
I1222 18:54:49.423730 28759 net.cpp:198] fc8 needs backward computation.
I1222 18:54:49.423734 28759 net.cpp:198] drop7 needs backward computation.
I1222 18:54:49.423758 28759 net.cpp:198] relu7 needs backward computation.
I1222 18:54:49.423760 28759 net.cpp:198] fc7 needs backward computation.
I1222 18:54:49.423764 28759 net.cpp:198] drop6 needs backward computation.
I1222 18:54:49.423768 28759 net.cpp:198] relu6 needs backward computation.
I1222 18:54:49.423773 28759 net.cpp:198] fc6 needs backward computation.
I1222 18:54:49.423776 28759 net.cpp:198] pool5 needs backward computation.
I1222 18:54:49.423780 28759 net.cpp:198] relu5 needs backward computation.
I1222 18:54:49.423784 28759 net.cpp:198] conv5 needs backward computation.
I1222 18:54:49.423789 28759 net.cpp:198] relu4 needs backward computation.
I1222 18:54:49.423792 28759 net.cpp:198] conv4 needs backward computation.
I1222 18:54:49.423796 28759 net.cpp:198] relu3 needs backward computation.
I1222 18:54:49.423800 28759 net.cpp:198] conv3 needs backward computation.
I1222 18:54:49.423804 28759 net.cpp:198] pool2 needs backward computation.
I1222 18:54:49.423808 28759 net.cpp:198] norm2 needs backward computation.
I1222 18:54:49.423811 28759 net.cpp:198] relu2 needs backward computation.
I1222 18:54:49.423815 28759 net.cpp:198] conv2 needs backward computation.
I1222 18:54:49.423820 28759 net.cpp:198] pool1 needs backward computation.
I1222 18:54:49.423823 28759 net.cpp:198] norm1 needs backward computation.
I1222 18:54:49.423827 28759 net.cpp:198] relu1 needs backward computation.
I1222 18:54:49.423831 28759 net.cpp:198] conv1 needs backward computation.
I1222 18:54:49.423835 28759 net.cpp:200] data does not need backward computation.
I1222 18:54:49.423838 28759 net.cpp:242] This network produces output phloss
I1222 18:54:49.423843 28759 net.cpp:242] This network produces output smloss
I1222 18:54:49.423859 28759 net.cpp:255] Network initialization done.
caffe transformation hdf5
4

0 回答 0