コード例 #1
0
ファイル: SplitImpl.hpp プロジェクト: hfp/libxsmm
 // Dispatch a forward pass to the engine-specific implementation.
 // Only the XSMM backend is handled here; any other engine value is a no-op.
 virtual void forwardPropagate(TensorBuf *inp, vector<TensorBuf *>& outp)
 {
   if(engine == XSMM)
     forwardPropagate(inp, outp, 0);  // tid 0 — single-threaded entry point
 }
コード例 #2
0
// Runs a forward and then a backward pass over the training trees in
// [startIndex, endIndex), accumulating into mModel.derivatives_d.
// Always returns 1.0 (callers apparently ignore the value — TODO confirm).
float
SentimentTraining::computeDerivatives(int startIndex, int endIndex) {
    // Zero the gradient buffer before accumulating this batch.
    CudaInterface::fillParamMem(mModel.derivatives_d, 0);

    // Forward pass over every tree in the half-open range.
    for (int treeIdx = startIndex; treeIdx < endIndex; ++treeIdx)
        forwardPropagate(mTrainTrees[treeIdx]);

    // Backward pass over the same range.
    for (int treeIdx = startIndex; treeIdx < endIndex; ++treeIdx)
        backPropagate(mTrainTrees[treeIdx]);

    return 1.0;
}
コード例 #3
0
/* Exercises forwardPropagate() on a fixed 2-2-2 network with known weights
 * and a fixed input vector, checking both output activations to 1e-3. */
static void forwardPropagate_test() {
    float hWeights[2][3] = {{-1, 1, -1}, {1, -1, 1}};
    float oWeights[2][3] = {{0.5, 0.25, 0.25}, {0.25, -0.5, 0.75}};
    Node inNodes[2];
    Node hidNodes[2] = {{hWeights[0]}, {hWeights[1]}};
    Node outNodes[2] = {{oWeights[0]}, {oWeights[1]}};
    Layer in  = {inNodes, 2};
    Layer hid = {hidNodes, 2};
    Layer out = {outNodes, 2};

    /* Fixed input vector (0.5, -0.5). */
    in.nodes[0].output = 0.5;
    in.nodes[1].output = -0.5;

    forwardPropagate( &in, &hid, &out );

    /* Expected activations for these exact weights and inputs. */
    assert( fabs( 0.0089929 - out.nodes[0].output ) < 0.001 && "Output is not as expected" );
    assert( fabs( 0.0269728 - out.nodes[1].output ) < 0.001 && "Output is not as expected" );
}
コード例 #4
0
static void backPropagate_test( void ) {
    float hiddenWeights[2][3] = {{-1, 1, 0}, {1, -1, 0}};
    float outputWeights[2][3] = {{0.5, 0.25, 0}, {0.25, -0.5, 0}};
    Node inputNodes[2];
    Node hiddenNodes[2] = {{hiddenWeights[0]}, {hiddenWeights[1]}};
    Node outputNode[2] = {{outputWeights[0]}, {outputWeights[1]}};
    Node desired[2] = {{NULL, -0.5}, {NULL, -0.5}};
    Layer inputLayer = {inputNodes, 2};
    Layer hiddenLayer = {hiddenNodes, 2};
    Layer outputLayer = {outputNode, 2};
    Layer desiredLayer = {desired, 2};

    inputLayer.nodes[0].output = 0.5; inputLayer.nodes[1].output = -0.5;
    forwardPropagate( &inputLayer, &hiddenLayer, &outputLayer );
    backPropagate( &hiddenLayer, &outputLayer, &desiredLayer );

    assert( fabs( -0.3118693 - outputLayer.nodes[0].error ) < 0.001 && "First output node error is not as expected" );
    assert( fabs( 0.0162368 - outputLayer.nodes[1].error ) < 0.001 && "Second output node error is not as expected" );
    assert( fabs( -0.1518755 - hiddenLayer.nodes[0].error ) < 0.001 && "First node of hidden layer's output is not as expected" );
    assert( fabs( -0.0860857 - hiddenLayer.nodes[1].error ) < 0.001 && "Second node of hidden layer's output is not as expected" );
}
コード例 #5
0
/*
 * Entry point. Builds the network layers from the node-definition file, then
 * either runs 1000 training iterations and persists the learned weights
 * (when trainingFlag is set), or forward-propagates every remaining test
 * case and prints the results.
 */
int main( int argc, char *argv[] ) {
    Layer *hiddenLayer;
    Layer *outputLayer;
    TestCase testCase;
    int numInputs;
    int iter;

    initRand();

    /* Validate the command line before doing any work. */
    if( !processArguments( argc, argv ) ) {
        fprintf( stderr, "Usage: main [-r, -t] [node definition file] [input file, training file]\n" );
        exit( EXIT_FAILURE );
    }

    /* buildLayers() returns the input count, or 0 on failure. */
    numInputs = buildLayers( &hiddenLayer, &outputLayer );
    if( numInputs == 0 ) {
        exit( EXIT_FAILURE );
    }

    getDefaultTestCase( numInputs, outputLayer->numNodes, &testCase );

    if( trainingFlag ) {
        /* Training mode: fixed number of passes over the training data. */
        for( iter = 0; iter < 1000; ++iter ) {
            populateNextTestCase( &testCase );
            train( &testCase, hiddenLayer, outputLayer );
        }

        /* Save the trained weights; bail out if persistence fails. */
        if( !persistWeights( numInputs, hiddenLayer, outputLayer ) ) {
            exit( EXIT_FAILURE );
        }
    } else {
        /* Evaluation mode: run the net on each input and report results. */
        while( populateNextTestCase( &testCase ) == NEW_INPUT ) {
            forwardPropagate( testCase.inputs, hiddenLayer, outputLayer );
            printTestResults( testCase.inputs, outputLayer, testCase.desiredOutputs );
        }
    }

    exit( EXIT_SUCCESS );
}