Example #1
DeepC2C2::DeepC2C2(int dimension, int l, int k, ActivationFunction fn, int nInputFeatures, int nClasses, float p, int cudaDevice, int nTop) : SparseConvNet(dimension,nInputFeatures, nClasses, cudaDevice, nTop) {
  for (int i=0;i<l;i++) {
    addLeNetLayerMP((i+1)*k,2,1,1,1,fn,p*i*1.0f/l);
    addLeNetLayerMP((i+1)*k,2,1,3,2,fn,p*i*1.0f/l);
  }
  addLeNetLayerMP((l+1)*k,2,1,1,1,fn,p);
  addLeNetLayerMP((l+1)*k,1,1,1,1,fn,p);
  addSoftmaxLayer();
}
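The constructors in these examples are easier to read once the positional arguments of addLeNetLayerMP are spelled out. The order assumed here is inferred from the calls themselves and from the btgraham/SparseConvNet headers the snippets resemble: (nFeatures, filterSize, filterStride, poolSize, poolStride, activationFn, dropout); verify against your copy of the library. A minimal usage sketch for the class above, with hypothetical values:

#include "NetworkArchitectures.h"   // assumed header name; adjust to your checkout

int main() {
  // 2D DeepC2C2: l = 5 conv/conv+pool blocks, k = 32 feature planes per block,
  // 3 input channels, 10 classes, no dropout, default CUDA device, top-1 accuracy.
  DeepC2C2 cnn(2, 5, 32, RELU, 3, 10, 0.0f, -1, 1);  // RELU assumed from the library's ActivationFunction enum
  // cnn.processDataset(trainingSet, 100, 0.003f);    // hypothetical training call
  return 0;
}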
Example #2
ROFMPSparseConvNet::ROFMPSparseConvNet(int dimension, int l, int k, float fmpShrink, ActivationFunction fn, int nInputFeatures, int nClasses, float p, int cudaDevice, int nTop) : SparseConvNet(dimension,nInputFeatures, nClasses, cudaDevice,nTop) {
  for (int i=0;i<l;i++) {
    addLeNetLayerROFMP(k*(i+1),2,1,2,fmpShrink,fn,p*i/(l+2));
  }
  addLeNetLayerROFMP(k*(l+1),2,1,2,1.5,fn,p*l/(l+2));
  addLeNetLayerMP(k*(l+2),2,1,1,1,fn,p*(l+1)/(l+2));
  addLeNetLayerMP(k*(l+3),1,1,1,1,fn,p);
  addSoftmaxLayer();
}
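The ROFMP variant replaces the fixed 2x2/stride-2 pooling with random overlapping fractional max-pooling; the fmpShrink argument appears to be the factor by which each pooling layer shrinks the spatial size. A rough, library-independent sanity check of how quickly a field shrinks under a typical ratio of 2^(1/3) (the input size and ratio below are assumptions, not taken from the example):

#include <cmath>
#include <cstdio>

int main() {
  double size = 94.0;                                  // hypothetical input field size
  const double fmpShrink = std::pow(2.0, 1.0 / 3.0);   // ~1.26, a common choice
  for (int i = 1; i <= 12; ++i) {
    size /= fmpShrink;                                 // each ROFMP layer divides the size by fmpShrink
    std::printf("after ROFMP layer %2d: ~%.1f\n", i, size);
  }
  return 0;
}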
Example #3
DeepCNiN::DeepCNiN (int dimension, int l, int k, ActivationFunction fn, int nInputFeatures, int nClasses, float p, int cudaDevice, int nTop) : SparseConvNet(dimension,nInputFeatures, nClasses, cudaDevice, nTop) {
  for (int i=0;i<=l;i++) {
    addLeNetLayerMP((i+1)*k,
                    (i==0)?2:2,
                    1,
                    (i<l)?3:1,
                    (i<l)?2:1,
                    fn,
                    p*i*1.0f/l);
    addLeNetLayerMP((i+1)*k,1,1,1,1,fn,p*i*1.0f/l); // 1x1 network-in-network convolution, same feature count
  }
  addSoftmaxLayer();
}
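Each pass through the DeepCNiN loop adds a spatial convolution followed by a 1x1 network-in-network convolution with the same number of feature planes, and dropout ramps linearly from 0 to p over the depth of the network. A small stand-alone check of that schedule (the values of l, k and p are hypothetical):

#include <cstdio>

int main() {
  const int l = 5, k = 32;   // hypothetical depth and width
  const float p = 0.5f;      // hypothetical final dropout rate
  for (int i = 0; i <= l; ++i)
    std::printf("block %d: conv + 1x1 NiN, %3d features, dropout %.2f\n",
                i, (i + 1) * k, p * i * 1.0f / l);
  return 0;
}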
Example #4
 CNN (int dimension, int l, int k, ActivationFunction fn, int nInputFeatures, int nClasses, float p=0.0f, int cudaDevice=-1, int nTop=1) : SparseConvNet(dimension,nInputFeatures, nClasses, cudaDevice, nTop){
   for (int i=0;i<=l;i++)
     addLeNetLayerMP((i+1)*k*0+32<<i, // parses as (0+32)<<i, i.e. 32<<i: features double each level and k is effectively unused
                     2+(i==0)+(i==l),
                     1,
                     (i<l)?3:1,
                     (i<l)?2:1,
                     fn,
                     p*i*1.0f/l);
   addSoftmaxLayer();
 }
ImagenetTriangular::ImagenetTriangular
(int dimension, ActivationFunction fn,
 int nInputFeatures, int nClasses, int cudaDevice, int nTop)
  : SparseConvTriangLeNet(dimension,nInputFeatures, nClasses, cudaDevice, nTop) {
  addLeNetLayerMP( 64,7,2,2,2,fn,0.0f,10);
  addLeNetLayerMP(128,3,1,3,2,fn,0.0f,4);
  addLeNetLayerMP(256,3,1,3,2,fn,0.0f,4);
  addLeNetLayerMP(384,3,1,3,2,fn,0.0f,4);
  addLeNetLayerMP(512,3,1,1,1,fn,0.0f,4);
  addTerminalPoolingLayer(32);
  addLeNetLayerMP(1024,1,1,1,1,fn);
  addSoftmaxLayer();
}
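A minimal instantiation sketch for the triangular ImageNet-style network above. The header name and the RELU constant are assumptions carried over from the SparseConvNet library these snippets appear to belong to; nTop = 5 follows the constructor's last parameter and requests top-5 accuracy reporting.

#include "NetworkArchitectures.h"   // assumed location of the class declaration

int main() {
  // 2D triangular-lattice net: 3 input channels, 1000 classes,
  // default CUDA device (-1), report top-5 accuracy.
  ImagenetTriangular net(2, RELU, 3, 1000, -1, 5);
  return 0;
}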