// OPERATION 1 // void update1( int i , int b , int c , int s , int e ,int v){ if(i>=size)return; propogate( i , b , c ); if ( ( e < b) || ( c < s) ) { return ; } if ( ( s <= b) && ( c <= e) ) { tree[i]=(tree[i]*v)%mod ; if ( b != c) { propogate(2*i,b,(b+c)/2); propogate(2*i+1,((b+c)/2)+1,c); lazy[ 2*i + 1].y = 1 ; lazy[ 2*i].y = 1 ; lazy[ 2*i].a1 = (lazy[2*i].a1* (ll)v)%mod ; lazy[ 2*i+1].a1 = (lazy[2*i+1].a1* (ll)v)%mod ; propogate(2*i,b,(b+c)/2); propogate(2*i+1,((b+c)/2)+1,c); //update1( 2*i , b , (b + c )/ 2 , s , e , v ); //update1( 2*i + 1 , ( (b + c )/ 2 + 1 ) , c , s , e , v ); } return ; } update1( 2*i , b , (b + c )/ 2 , s , e , v ); update1( 2*i + 1 , ( (b + c )/ 2 + 1 ) , c , s , e , v ); tree[i] = (tree[2*i] + tree[2*i + 1])%mod; }
/*
 * Backtracking search over the candidate digits of the most constrained
 * square. Returns the solved puzzle (caller owns it) or NULL on dead end.
 *
 * BUG FIX: the strndup'd snapshot `old_vals` was never freed, leaking one
 * buffer per recursion level on both the success and failure paths.
 * Also hoists strlen() out of the loop condition (it was re-scanned on
 * every iteration).
 */
static puzzle_t *search(puzzle_t *puz) {
    if (!propogate(puz)) {
        return NULL;
    }
    if (solved(puz)) {
        return puz;
    }

    /* Pick the best square to branch on. */
    square_t *best = find_search_candidate(puz);

    /* Snapshot its candidate values; the copies below mutate the board. */
    char *old_vals = strndup(best->vals, NUM_DIGITS);
    assert(strlen(old_vals) == strlen(best->vals));

    size_t num_vals = strlen(old_vals);
    for (size_t val = 0; val < num_vals; val++) {
        puzzle_t *copy = copy_puzzle(puz);
        assert(copy != puz);

        /* Commit one candidate digit in the copy and recurse. */
        copy->squares[best->row][best->col].vals[0] = old_vals[val];
        copy->squares[best->row][best->col].vals[1] = '\0';

        puzzle_t *return_puz;
        if ((return_puz = search(copy)) != NULL) {
            if (!solved(copy)) {
                /* Free whichever of puz/copy is neither the caller's
                 * puzzle nor the solution being returned. */
                if ((puz != return_puz) && (puz != copy)) {
                    free_puzzle(puz);
                } else if ((copy != puz) && (copy != return_puz)) {
                    free_puzzle(copy);
                }
            }
            free(old_vals);   /* was leaked on the success path */
            return return_puz;
        }
        free_puzzle(copy);
    }
    free(old_vals);           /* was leaked on the dead-end path */
    return NULL;
}
void initWeights() { // Initialize weights between input-hidden for (int i = 0; i<numIn; i++) { for (int j = 0; j<numHid; j++) { //weight = random float between - 1 and 1 weightInHid[i][j] = (((float) rand()/(float)(RAND_MAX+1))*2)-1; } } // Initialize weights between hidden-output for (i = 0; i <numHid; i++) { //weight = random float between -1 and 1 weightHidOut[i][j] = (((float) rand() / (float)(RAND_MAX+1)) *2)-1; } return; // Training loop while (iterations <= N) { // For each example in the training set for (int i = 0; i<numTrain; i++) { Input[j] = TrainingIn[i][j]; } //Propogate; propogate(); //Backpropapgate error backpropapgate(i); } }
// OPERATION 3 // void update3( int i , int b , int c , int s , int e , int v ){ propogate( i , b , c ); if ( ( e < b) || ( c < s) ) { return ; } if ( ( s <= b) && ( c <= e) ) { tree[i] = ( (ll)( c - b + 1) * (ll) v )%mod ; if ( b != c) { lazy[ 2*i].a3 = 1 ; lazy[ 2*i].x= (ll) v; lazy[ 2*i].a1= 1ll; lazy[ 2*i].d= 0; lazy[ 2*i].a2= 0; lazy[ 2*i].y = 1 ; lazy[ 2*i + 1].y = 1 ; lazy[ 2*i + 1].a3 = 1; lazy[2*i + 1].x = (ll)v; lazy[2*i + 1].a1 = 1ll; lazy[2*i + 1].d = 0; lazy[2*i + 1].a2 = 0; } return ; } update3( 2*i , b , (b + c )/ 2 , s , e , v ); update3( 2*i + 1 , ( (b + c )/ 2 + 1 ) , c , s , e , v ); tree[i] = (tree[2*i] + tree[2*i + 1])%mod; }
// OPERATION 4 // ll query( int i , int b , int c , int s ,int e){ propogate(i , b , c); ll x , y ; if ( ( e < b) || ( s > c) ) { return 0 ; } if ( (s <=b) && ( c<= e)) { return tree[i]%mod; } x = query( 2 * i , b , ( b + c )/ 2 , s ,e ); y = query( 2*i + 1 , ((b+c)/2 + 1 ) , c , s , e ); return ( x + y )%mod; }
// THIS IS MADE FOR CHECKING WHETHER NECCESARY UPDATES ARE TO DONE BEFORE NEXT UPDATES// void propogate( int i , int b , int c){ if(i>=size)return ; if ( lazy[i].y == 0 ){ return ; } if ( lazy[i].a3 == 1){ tree[i] = ((ll) (c - b + 1)*(lazy[i].x))%mod ; if ( b != c){ if(lazy[2*i].y!=0) propogate(2*i,b,(b+c)/2); if(lazy[2*i+1].y!=0)propogate(2*i+1,((b+c)/2)+1,c); lazy[ 2*i].a3 = 1; lazy[ 2*i].x= lazy[i].x; lazy[ 2*i].y = 1 ; lazy[ 2*i + 1].y = 1 ; lazy[ 2*i + 1].a3 = lazy[i].a3; lazy[2*i + 1].x = lazy[i].x; lazy[ 2*i].a3 = 1 ; lazy[ 2*i].a1= 1ll; lazy[ 2*i].d= 0; lazy[ 2*i].a2= 0; lazy[2*i + 1].a1 = 1ll; lazy[2*i + 1].d = 0; lazy[2*i + 1].a2 = 0; } } if ( lazy[i].a1!=1) { tree[i]=(tree[i]*lazy[i].a1)%mod ; if ( b != c) { if(lazy[2*i].y!=0) propogate(2*i,b,(b+c)/2); if(lazy[2*i+1].y!=0)propogate(2*i+1,((b+c)/2)+1,c); lazy[ 2*i + 1].y = 1; lazy[ 2*i].y = 1 ; lazy[ 2*i].a1= (lazy[2*i].a1*lazy[i].a1)%mod ; lazy[ 2*i+1].a1= (lazy[2*i+1].a1*lazy[i].a1)%mod ; } } if ( lazy[i].a2!=0) { tree[i]= (tree[i]+ ( (ll) (c - b + 1)*(lazy[i].a2))%mod)%mod ; if ( b != c) { if(lazy[2*i].y!=0) propogate(2*i,b,(b+c)/2); if(lazy[2*i+1].y!=0)propogate(2*i+1,((b+c)/2)+1,c); lazy[ 2*i].y = 1 ; lazy[ 2*i].a2= (lazy[2*i].a2+lazy[i].a2)%mod; lazy[ 2*i + 1].y = 1 ; lazy[ 2*i+1].a2= (lazy[2*i+1].a2+lazy[i].a2)%mod; } } clear(i); }
// One training step: forward-propagate inputVector, measure the
// element-wise error against targetVector, then back-propagate that
// error through every layer (output layer first) so each layer can
// adjust its weights using actFuncOffset and learnRate.
void NeuralNetworkSimpleInternal::backProp(NumericalArray<float> const& inputVector, NumericalArray<float> const& targetVector) throw()
{
    // Forward pass: compute the network's current output for this input.
    NumericalArray<float> outputVector = propogate(inputVector);

    // Output-layer error: (target - actual) for each element.
    float* errPtr = errorVector.getArray();
    const float* tgtPtr = targetVector.getArray();
    const float* outPtr = outputVector.getArray();
    const int numErrors = errorVector.size();

    for (int n = 0; n < numErrors; ++n)
    {
        errPtr[n] = tgtPtr[n] - outPtr[n];
    }

    // Backward pass: each layer consumes the incoming error and hands
    // back the error to feed into the layer below it.
    NumericalArray<float> delta = errorVector;
    NeuralLayer* allLayers = layers.getArray();

    for (int layer = getNumLayersExcludingInput() - 1; layer >= 0; --layer)
    {
        delta = allLayers[layer].backProp(delta, actFuncOffset, learnRate);
    }
}