Example #1
static void getMKLDNNConcatConfig(TestConfig& cfg,
                                  const std::vector<testImageDesc>& inputs) {
  CHECK_GE(inputs.size(), 2UL) << "at least two inputs";
  // Concatenation is along the channel axis: channels are summed,
  // while batch size and spatial dims must match across inputs.
  int oc = inputs[0].ic;
  for (size_t i = 1; i < inputs.size(); ++i) {
    CHECK_EQ(inputs[i].bs, inputs[0].bs);
    CHECK_EQ(inputs[i].ih, inputs[0].ih);
    CHECK_EQ(inputs[i].iw, inputs[0].iw);
    oc += inputs[i].ic;
  }
  cfg.biasSize = 0;
  cfg.layerConfig.set_type("mkldnn_concat");
  cfg.layerConfig.set_size(oc * inputs[0].ih * inputs[0].iw);
  cfg.layerConfig.set_active_type("relu");
  // Add one input definition per source layer, named layer_0, layer_1, ...
  for (size_t i = 0; i < inputs.size(); ++i) {
    std::stringstream ss;
    ss << "layer_" << i;
    cfg.inputDefs.push_back(
        {INPUT_DATA,
         ss.str(),
         (size_t)(inputs[i].ic) * inputs[i].ih * inputs[i].iw,
         0});
    LayerInputConfig* input = cfg.layerConfig.add_inputs();
    ImageConfig* img_conf = input->mutable_image_conf();
    img_conf->set_channels(inputs[i].ic);
    img_conf->set_img_size_y(inputs[i].ih);
    img_conf->set_img_size(inputs[i].iw);
  }
}
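
A minimal caller sketch for this helper, assuming the MKLDNNTester-style harness from the same test suite; the wrapper name testConcatLayer and the RUN_MKLDNN_TEST_LAYER macro (which copies the config, swaps in the given reference layer type, and runs both through the tester) are assumptions, not part of the snippet above.

// Hypothetical usage sketch; helper and macro names are assumptions.
void testConcatLayer(const std::vector<testImageDesc>& inputs) {
  TestConfig dnnConfig;
  getMKLDNNConcatConfig(dnnConfig, inputs);
  // Compare the mkldnn_concat layer against the plain "concat" reference.
  RUN_MKLDNN_TEST_LAYER(dnnConfig, "concat", inputs[0]);
}

TEST(MKLDNNLayer, ConcatLayer) {
  // Fields assumed to be {bs, ic, ih, iw}: two inputs differing only in ic.
  testConcatLayer({{4, 2, 3, 5}, {4, 3, 3, 5}});
}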
Example #2
static void getAddtoConfig(TestConfig& cfg,
                           const testImageDesc& pm,
                           const size_t nInputs = 1) {
  cfg.biasSize = 0;
  cfg.layerConfig.set_type("addto");
  // Every addto input has the same element count as the output.
  size_t layerSize = pm.ic * pm.ih * pm.iw;
  cfg.layerConfig.set_size(layerSize);
  cfg.layerConfig.set_active_type("relu");
  for (size_t i = 0; i < nInputs; ++i) {
    std::stringstream ss;
    ss << "layer_" << i;
    cfg.inputDefs.push_back({INPUT_DATA, ss.str(), layerSize, 0});
    LayerInputConfig* input = cfg.layerConfig.add_inputs();
    ImageConfig* img_conf = input->mutable_image_conf();
    img_conf->set_channels(pm.ic);
    img_conf->set_img_size_y(pm.ih);
    img_conf->set_img_size(pm.iw);
  }
}
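
A caller sketch in the same spirit; testAddtoLayer and RUN_MKLDNN_TEST_LAYER are assumed names. Note how the generic "addto" config is retargeted to the MKLDNN implementation before running.

// Hypothetical usage sketch; helper and macro names are assumptions.
void testAddtoLayer(const testImageDesc& pm, const size_t nInputs) {
  CHECK_GE(nInputs, 1UL);
  TestConfig dnnConfig;
  getAddtoConfig(dnnConfig, pm, nInputs);
  // Switch the layer type to the MKLDNN variant; plain "addto" is the reference.
  dnnConfig.layerConfig.set_type("mkldnn_addto");
  RUN_MKLDNN_TEST_LAYER(dnnConfig, "addto", pm);
}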
Example #3
static void getMKLDNNBatchNormConfig(TestConfig& cfg,
                                     const testBatchNormDesc& pm) {
  cfg.layerConfig.set_size(pm.ic * pm.ih * pm.iw);
  cfg.layerConfig.set_type("mkldnn_batch_norm");
  // Per-channel scale (weight) and bias, one value per input channel.
  cfg.biasSize = pm.ic;
  cfg.inputDefs.push_back(
      {INPUT_DATA,
       "layer_0",
       /* size of input layer= */ size_t(pm.ic * pm.ih * pm.iw),
       /* size of weight= */ size_t(pm.ic)});
  // Moving mean and variance are static (non-trainable) per-channel stats.
  cfg.inputDefs.push_back(
      {INPUT_DATA, "layer_1_moving_mean", 1, size_t(pm.ic)});
  cfg.inputDefs.back().isStatic = true;
  cfg.inputDefs.push_back({INPUT_DATA, "layer_2_moving_var", 1, size_t(pm.ic)});
  cfg.inputDefs.back().isStatic = true;
  // The layer has three inputs (data, moving mean, moving var); only the
  // first carries an image configuration.
  LayerInputConfig* input = cfg.layerConfig.add_inputs();
  cfg.layerConfig.set_active_type("relu");
  cfg.layerConfig.add_inputs();
  cfg.layerConfig.add_inputs();
  ImageConfig* img_conf = input->mutable_image_conf();
  img_conf->set_channels(pm.ic);
  img_conf->set_img_size_y(pm.ih);
  img_conf->set_img_size(pm.iw);
}
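
A caller sketch for the batch-norm config; the MKLDNNTester name and its run(dnnConfig, refConfig, bs, ih, iw) signature are assumptions about the surrounding harness.

// Hypothetical usage sketch; tester name and run() signature are assumptions.
void testBatchNormLayer(const testBatchNormDesc& pm) {
  TestConfig dnnConfig;
  getMKLDNNBatchNormConfig(dnnConfig, pm);
  TestConfig refConfig = dnnConfig;             // same topology for the reference
  refConfig.layerConfig.set_type("batch_norm"); // plain CPU batch norm as baseline
  MKLDNNTester tester;
  tester.run(dnnConfig, refConfig, pm.bs, pm.ih, pm.iw);
}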