Code example #1
File: conv_layer.hpp  Project: shenzebang/mlpack
/**
 * Create the ConvLayer object using the specified number of input maps,
 * output maps, filter size, stride, and padding parameters.
 *
 * @param inMaps The number of input maps.
 * @param outMaps The number of output maps.
 * @param wfilter Width of the filter/kernel.
 * @param hfilter Height of the filter/kernel.
 * @param xStride Stride of filter application in the x direction.
 * @param yStride Stride of filter application in the y direction.
 * @param wPad Spatial padding width of the input.
 * @param hPad Spatial padding height of the input.
 * @param weightInitRule The weight initialization rule used to initialize
 *        the weight matrix.
 */
ConvLayer(const size_t inMaps,
          const size_t outMaps,
          const size_t wfilter,
          const size_t hfilter,
          const size_t xStride = 1,
          const size_t yStride = 1,
          const size_t wPad = 0,
          const size_t hPad = 0,
          WeightInitRule weightInitRule = WeightInitRule()) :
    wfilter(wfilter),
    hfilter(hfilter),
    inMaps(inMaps),
    outMaps(outMaps),
    xStride(xStride),
    yStride(yStride),
    wPad(wPad),
    hPad(hPad),
    optimizer(new OptimizerType<ConvLayer<OptimizerType,
              WeightInitRule,
              ForwardConvolutionRule,
              BackwardConvolutionRule,
              GradientConvolutionRule,
              InputDataType,
              OutputDataType>,
              OutputDataType>(*this)),
    ownsOptimizer(true)
{
    weightInitRule.Initialize(weights, wfilter, hfilter, inMaps * outMaps);
}
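
The constructor above delegates all weight allocation to the weightInitRule argument: the call Initialize(weights, wfilter, hfilter, inMaps * outMaps) asks the rule to fill one wfilter x hfilter slice per (input map, output map) pair. A minimal sketch of such a rule is shown below; UniformInit is a hypothetical stand-in, not mlpack's actual initialization class, and it only mirrors the cube-style overload implied by that call.

#include <armadillo>

// Hypothetical stand-in for a WeightInitRule (not mlpack's actual class):
// fills a cube of filter weights with uniform random values.
struct UniformInit
{
  double low, high;

  UniformInit(const double low = -1, const double high = 1) :
      low(low), high(high) { }

  // Overload matching the call in the ConvLayer constructor above:
  // one rows x cols slice per (input map, output map) pair.
  void Initialize(arma::cube& weights,
                  const std::size_t rows,
                  const std::size_t cols,
                  const std::size_t slices)
  {
    weights.randu(rows, cols, slices);       // uniform values in [0, 1]
    weights = low + (high - low) * weights;  // rescale to [low, high]
  }
};

int main()
{
  arma::cube weights;
  UniformInit rule(-0.5, 0.5);

  // For 3 input maps, 8 output maps and a 5x5 kernel this yields a
  // 5 x 5 x 24 cube, matching the constructor body above.
  rule.Initialize(weights, 5, 5, 3 * 8);
  weights.print("conv filter weights");
}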
Code example #2
File: full_connection.hpp  Project: CodeFuck/mlpack
 /**
  * Create the FullConnection object using the specified input layer, output
  * layer, optimizer, and weight initialization rule.
  *
  * @param inputLayer The input layer which is connected with the output
  * layer.
  * @param outputLayer The output layer which is connected with the input
  * layer.
  * @param optimizer The optimizer used to update the weight matrix.
  * @param weightInitRule The weight initialization rule used to initialize
  * the weight matrix.
  */
 FullConnection(InputLayerType& inputLayer,
                OutputLayerType& outputLayer,
                OptimizerType& optimizer,
                WeightInitRule weightInitRule = WeightInitRule()) :
     inputLayer(inputLayer), outputLayer(outputLayer), optimizer(optimizer)
 {
   weightInitRule.Initialize(weights, outputLayer.InputSize(),
       inputLayer.OutputSize());
 }
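
The two sizes passed to Initialize fix the shape of the connection's weight matrix: outputLayer.InputSize() rows and inputLayer.OutputSize() columns, so a forward pass through the connection reduces to a plain matrix-vector product. The short Armadillo sketch below illustrates that layout; the concrete sizes and the forward step are illustrative assumptions, not the actual FullConnection implementation.

#include <armadillo>

int main()
{
  const std::size_t inSize = 10;   // stand-in for inputLayer.OutputSize()
  const std::size_t outSize = 4;   // stand-in for outputLayer.InputSize()

  // Same layout as in the constructor body above:
  // outputLayer.InputSize() rows, inputLayer.OutputSize() columns.
  arma::mat weights = arma::randu<arma::mat>(outSize, inSize);

  // With that layout the forward step is a matrix-vector product.
  arma::vec input = arma::randu<arma::vec>(inSize);
  arma::vec output = weights * input;
  output.print("output");
}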
Code example #3
File: self_connection.hpp  Project: ashvant/mlpack
 /**
  * Create the SelfConnection object using the specified input layer, output
  * layer, optimizer, and weight initialization rule.
  *
  * @param inputLayer The input layer which is connected with the output
  * layer.
  * @param outputLayer The output layer which is connected with the input
  * layer.
  * @param optimizer The optimizer used to update the weight matrix.
  * @param weightInitRule The weight initialization rule used to initialize
  * the weight matrix.
  */
 SelfConnection(InputLayerType& inputLayer,
                OutputLayerType& outputLayer,
                OptimizerType& optimizer,
                WeightInitRule weightInitRule = WeightInitRule()) :
     inputLayer(inputLayer),
     outputLayer(outputLayer),
     optimizer(optimizer),
     connection(1 - arma::eye<MatType>(inputLayer.OutputSize(),
         inputLayer.OutputSize()))
 {
   weightInitRule.Initialize(weights, outputLayer.InputSize(),
       inputLayer.OutputSize());
 }
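
The extra connection member built in the initializer list, 1 - arma::eye(n, n), is a mask of ones with zeros on the diagonal: every unit is connected to every other unit, but not to itself. The sketch below shows the mask and how it could be applied to a square weight matrix; the element-wise product is an illustrative assumption about how such a mask would be used, not the actual SelfConnection forward pass.

#include <armadillo>

int main()
{
  const std::size_t n = 4;   // stand-in for inputLayer.OutputSize()

  // Same expression as in the initializer list above: ones everywhere
  // except the diagonal, i.e. no unit is connected to itself.
  arma::mat connection = 1 - arma::eye<arma::mat>(n, n);

  // Applying the mask element-wise (% in Armadillo) zeroes out the
  // self-connections while leaving all cross-connections intact.
  arma::mat weights = arma::randu<arma::mat>(n, n);
  arma::mat masked = connection % weights;
  masked.print("weights without self-connections");
}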
Code example #4
File: bias_layer.hpp  Project: shenzebang/mlpack
 /**
  * Create the BiasLayer object using the specified number of units and bias
  * parameter.
  *
  * @param outSize The number of output units.
  * @param bias The bias value.
  * @param weightInitRule The weight initialization rule used to initialize
  *        the weight matrix.
  */
 BiasLayer(const size_t outSize,
           const double bias = 1,
           WeightInitRule weightInitRule = WeightInitRule()) :
     outSize(outSize),
     bias(bias),
     optimizer(new OptimizerType<BiasLayer<OptimizerType,
                                           WeightInitRule,
                                           InputDataType,
                                           OutputDataType>,
                                           InputDataType>(*this)),
     ownsOptimizer(true)
 {
   weightInitRule.Initialize(weights, outSize, 1);
 }
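
Here Initialize(weights, outSize, 1) allocates a single column of weights, one entry per output unit, and bias is a scalar that scales that column. The sketch below shows the resulting shapes; the forward step is an illustrative assumption about how a bias column would typically be added, not the actual BiasLayer code.

#include <armadillo>

int main()
{
  const std::size_t outSize = 5;
  const double bias = 1;

  // Same shape as in the constructor body above: one weight per output
  // unit (an outSize x 1 column).
  arma::vec weights = arma::randu<arma::vec>(outSize);

  // Illustrative forward step: the bias column, scaled by the bias
  // parameter, is added to the incoming activations.
  arma::vec input = arma::randu<arma::vec>(outSize);
  arma::vec output = input + bias * weights;
  output.print("biased output");
}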
Code example #5
File: self_connection.hpp  Project: riveridea/mlpack
 /**
  * Create the SelfConnection object using the specified input layer, output
  * layer, and weight initialization rule. The optimizer is created and owned
  * by the connection itself.
  *
  * @param inputLayer The input layer which is connected with the output
  * layer.
  * @param outputLayer The output layer which is connected with the input
  * layer.
  * @param weightInitRule The weight initialization rule used to initialize
  * the weight matrix.
  */
 SelfConnection(InputLayerType& inputLayer,
                OutputLayerType& outputLayer,
                WeightInitRule weightInitRule = WeightInitRule()) :
     inputLayer(inputLayer),
     outputLayer(outputLayer),
     optimizer(new OptimizerType<SelfConnection<InputLayerType,
                                                OutputLayerType,
                                                OptimizerType,
                                                WeightInitRule,
                                                MatType,
                                                VecType>, MatType>(*this)),
     ownsOptimizer(true),
     connection(1 - arma::eye<MatType>(inputLayer.OutputSize(),
         inputLayer.OutputSize()))
 {
   weightInitRule.Initialize(weights, outputLayer.InputSize(),
       inputLayer.OutputSize());
 }
Code example #6
File: conv_connection.hpp  Project: suspy/mlpack
 /**
  * Create the ConvConnection object using the specified input layer, output
  * layer, filter size, and weight initialization rule.
  *
  * @param inputLayer The input layer which is connected with the output
  * layer.
  * @param outputLayer The output layer which is connected with the input
  * layer.
  * @param filterSize The size of the filter.
  * @param weightInitRule The weight initialization rule used to initialize
  * the weight matrix.
  */
 ConvConnection(InputLayerType& inputLayer,
                OutputLayerType& outputLayer,
                const size_t filterSize,
                WeightInitRule weightInitRule = WeightInitRule()) :
     inputLayer(inputLayer),
     outputLayer(outputLayer),
     optimizer(new OptimizerType<ConvConnection<InputLayerType,
                                                OutputLayerType,
                                                OptimizerType,
                                                WeightInitRule,
                                                ForwardConvolutionRule,
                                                BackwardConvolutionRule,
                                                GradientConvolutionRule,
                                                DataType>, DataType>(*this)),
     ownsOptimizer(true)
 {
   weightInitRule.Initialize(weights, filterSize, filterSize,
       inputLayer.OutputMaps() * outputLayer.OutputMaps());
 }
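
Note the slice count passed to Initialize: inputLayer.OutputMaps() * outputLayer.OutputMaps(), i.e. one filterSize x filterSize kernel for every (input map, output map) pair. The sketch below reproduces that shape and convolves one input map with one kernel; arma::conv2 is used only as an illustrative stand-in for the ForwardConvolutionRule, and the map counts and image size are made up for the example.

#include <armadillo>
#include <iostream>

int main()
{
  const std::size_t filterSize = 5;
  const std::size_t inMaps = 3;    // stand-in for inputLayer.OutputMaps()
  const std::size_t outMaps = 8;   // stand-in for outputLayer.OutputMaps()

  // Same shape as in the constructor body above: one filterSize x
  // filterSize kernel per (input map, output map) pair.
  arma::cube weights = arma::randu<arma::cube>(filterSize, filterSize,
      inMaps * outMaps);

  // Convolve a single input map with the first kernel as a stand-in for
  // the ForwardConvolutionRule.
  arma::mat inputMap = arma::randu<arma::mat>(28, 28);
  arma::mat response = arma::conv2(inputMap, weights.slice(0), "same");
  std::cout << "response size: " << arma::size(response) << std::endl;
}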
Code example #7
File: conv_connection.hpp  Project: suspy/mlpack
 /**
  * Create the ConvConnection object using the specified input layer, output
  * layer, filter size, optimizer, and weight initialization rule.
  *
  * @param inputLayer The input layer which is connected with the output
  * layer.
  * @param outputLayer The output layer which is connected with the input
  * layer.
  * @param filterSize The size of the filter.
  * @param optimizer The optimizer used to update the weight matrix.
  * @param weightInitRule The weight initialization rule used to initialize
  * the weight matrix.
  */
 ConvConnection(InputLayerType& inputLayer,
                OutputLayerType& outputLayer,
                const size_t filterSize,
                OptimizerType<ConvConnection<InputLayerType,
                                             OutputLayerType,
                                             OptimizerType,
                                             WeightInitRule,
                                             ForwardConvolutionRule,
                                             BackwardConvolutionRule,
                                             GradientConvolutionRule,
                                             DataType>, DataType>& optimizer,
                WeightInitRule weightInitRule = WeightInitRule()) :
     inputLayer(inputLayer),
     outputLayer(outputLayer),
     optimizer(&optimizer),
     ownsOptimizer(false)
 {
   weightInitRule.Initialize(weights, filterSize, filterSize,
       outputLayer.LayerSlices());
 }
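
Unlike the previous constructor, this overload stores a pointer to an externally supplied optimizer and sets ownsOptimizer(false), so only an optimizer the connection created for itself should be deleted later. The sketch below isolates that owned-versus-borrowed pattern; Optimizer and Holder are hypothetical names used for illustration, not the actual ConvConnection classes.

// Illustrative owned-vs-borrowed optimizer pattern implied by the two
// constructors above; Optimizer and Holder are hypothetical names.
struct Optimizer { /* hyperparameters, update state, ... */ };

class Holder
{
 public:
  // Borrow an external optimizer: it is not deleted on destruction.
  explicit Holder(Optimizer& optimizer) :
      optimizer(&optimizer), ownsOptimizer(false) { }

  // Create an internal optimizer: it is deleted on destruction.
  Holder() : optimizer(new Optimizer()), ownsOptimizer(true) { }

  // Non-copyable, to avoid deleting the same optimizer twice.
  Holder(const Holder&) = delete;
  Holder& operator=(const Holder&) = delete;

  ~Holder()
  {
    if (ownsOptimizer)
      delete optimizer;
  }

 private:
  Optimizer* optimizer;
  bool ownsOptimizer;
};

int main()
{
  Optimizer shared;
  Holder borrowed(shared);  // 'shared' outlives 'borrowed'; not deleted by it
  Holder owned;             // the internal optimizer is freed in ~Holder()
}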
Code example #8
File: lstm_layer.hpp  Project: riveridea/mlpack
  /**
   * Create the LSTMLayer object using the specified parameters.
   *
    * @param layerSize The number of memory cells.
    * @param seqLen The length of the input sequence.
    * @param peepholes The flag used to indicate if peephole connections should
    * be used (Default: false).
    * @param weightInitRule The weight initialization rule used to initialize
    * the peephole connection matrix.
   */
  LSTMLayer(const size_t layerSize,
            const size_t seqLen = 1,
            const bool peepholes = false,
            WeightInitRule weightInitRule = WeightInitRule()) :
      inputActivations(arma::zeros<VecType>(layerSize * 4)),
      layerSize(layerSize),
      seqLen(seqLen),
      inGate(arma::zeros<MatType>(layerSize, seqLen)),
      inGateAct(arma::zeros<MatType>(layerSize, seqLen)),
      inGateError(arma::zeros<MatType>(layerSize, seqLen)),
      outGate(arma::zeros<MatType>(layerSize, seqLen)),
      outGateAct(arma::zeros<MatType>(layerSize, seqLen)),
      outGateError(arma::zeros<MatType>(layerSize, seqLen)),
      forgetGate(arma::zeros<MatType>(layerSize, seqLen)),
      forgetGateAct(arma::zeros<MatType>(layerSize, seqLen)),
      forgetGateError(arma::zeros<MatType>(layerSize, seqLen)),
      state(arma::zeros<MatType>(layerSize, seqLen)),
      stateError(arma::zeros<MatType>(layerSize, seqLen)),
      cellAct(arma::zeros<MatType>(layerSize, seqLen)),
      offset(0),
      peepholes(peepholes)
  {
    if (peepholes)
    {
      weightInitRule.Initialize(inGatePeepholeWeights, layerSize, 1);
      inGatePeepholeDerivatives = arma::zeros<VecType>(layerSize);
      inGatePeepholeOptimizer = std::unique_ptr<OptimizerType>(
          new OptimizerType(1, layerSize));

      weightInitRule.Initialize(forgetGatePeepholeWeights, layerSize, 1);
      forgetGatePeepholeDerivatives = arma::zeros<VecType>(layerSize);
      forgetGatePeepholeOptimizer = std::unique_ptr<OptimizerType>(
          new OptimizerType(1, layerSize));

      weightInitRule.Initialize(outGatePeepholeWeights, layerSize, 1);
      outGatePeepholeDerivatives = arma::zeros<VecType>(layerSize);
      outGatePeepholeOptimizer = std::unique_ptr<OptimizerType>(
          new OptimizerType(1, layerSize));
    }
  }
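
When peepholes is set, each of the three gates gets its own layerSize x 1 peephole weight vector (plus matching derivative storage and optimizer), connecting the cell state directly to that gate. The sketch below shows how such a peephole term would typically enter a gate pre-activation; the element-wise weighting and the logistic sigmoid are illustrative assumptions, not the actual LSTMLayer forward code.

#include <armadillo>

int main()
{
  const std::size_t layerSize = 6;

  // Same shape as in the peephole branch above: one weight per memory
  // cell and gate (only the input gate is shown here).
  arma::vec inGatePeepholeWeights = arma::randu<arma::vec>(layerSize);

  // Illustrative peephole contribution: the cell state is weighted
  // element-wise and added to the gate's net input before the gate
  // nonlinearity (a logistic sigmoid here) is applied.
  arma::vec cellState = arma::randu<arma::vec>(layerSize);
  arma::vec inGateNet = arma::randu<arma::vec>(layerSize);
  inGateNet += inGatePeepholeWeights % cellState;

  arma::vec inGateAct = 1.0 / (1.0 + arma::exp(-inGateNet));
  inGateAct.print("input gate activation with peephole");
}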