bool ResizeLayer::init(const LayerMap& layerMap,
                       const ParameterMap& parameterMap) {
  // Let the base class set up common layer state first; a failure there
  // means this layer cannot be used, so report it immediately.
  const bool baseOk = Layer::init(layerMap, parameterMap);
  if (!baseOk) {
    return false;
  }

  // A resize layer operates on exactly one input.
  CHECK_EQ(1U, inputLayers_.size());

  // Sequence start positions are not propagated through this layer.
  setNeedSequenceInfo(false);
  return true;
}
bool KmaxSeqScoreLayer::init(const LayerMap& layerMap,
                             const ParameterMap& parameterMap) {
  /* Initialize the basic parent class. Bail out early on failure so the
   * CHECKs and configuration below never run against a half-initialized
   * layer (the original returned the flag only at the end, executing all
   * setup regardless of base-init success — inconsistent with the other
   * layers' early-return pattern). */
  if (!Layer::init(layerMap, parameterMap)) return false;

  // This layer scores sequences from exactly one input.
  CHECK_EQ(1U, inputLayers_.size());

  // beam_size comes from the layer config and must select at least one
  // candidate per sequence.
  beamSize_ = config_.beam_size();
  CHECK_GE(beamSize_, 1U);

  // No sequence info is forwarded, and this layer has no trainable
  // parameters, so it needs no gradient either.
  setNeedSequenceInfo(false);
  setNeedGradient(false);
  return true;
}
bool SequenceConcatLayer::init(const LayerMap& layerMap, const ParameterMap& parameterMap) { /* Initialize the basic parent class */ Layer::init(layerMap, parameterMap); // sequene concatenation layer should have exactly 2 inputs CHECK_EQ(2U, inputLayers_.size()); /* initialize biases_ */ if (biasParameter_.get() != NULL) { biases_ = std::unique_ptr<Weight>(new Weight(1, getSize(), biasParameter_)); } setNeedSequenceInfo(false); return true; }
bool ExpandLayer::init(const LayerMap& layerMap, const ParameterMap& parameterMap) { /* Initialize the basic parent class */ Layer::init(layerMap, parameterMap); CHECK_EQ(inputLayers_.size(), 2UL); /* initialize biases_ */ if (biasParameter_.get() != NULL) { biases_ = std::unique_ptr<Weight>(new Weight(1, getSize(), biasParameter_)); } // which sequence type of input[0] if (config_.trans_type() == "non-seq") { type_ = kNonSeq; } else if (config_.trans_type() == "seq") { type_ = kSeq; } else { LOG(FATAL) << "Unknown trans_type: " << config_.trans_type(); } setNeedSequenceInfo(false); return true; }