int sapi_stack_apply_with_argument_all(sapi_stack *stack, int type, int (*apply_function)(void *element, void *arg), void *arg)
{
    int i;
    int retval = DECLINED; /* Initialized so an empty stack cannot return an indeterminate value. */

    switch (type) {
        case ZEND_STACK_APPLY_TOPDOWN:
            for (i = stack->top - 1; i >= 0; i--) {
                retval = apply_function(stack->elements[i], arg);
            }
            break;
        case ZEND_STACK_APPLY_BOTTOMUP:
            for (i = 0; i < stack->top; i++) {
                retval = apply_function(stack->elements[i], arg);
            }
            break;
    }
    /* Unlike the early-exit variants below, this one always visits every
     * element and returns the last callback's result. */
    return retval;
}
void computeTfceIteration(float h, float *map, int n, int dim_x, int dim_y, int dim_z, float E, float H, float dh, float *toReturn)
{
    int i, j;
    int numOfElementsMatching = 0;
    int num_clusters = 0;

    // Binary mask of voxels whose value exceeds the current threshold h.
    int *indexMatchingData = getBinaryVector(map, n, moreThan, h, &numOfElementsMatching);

    // Label the connected components (clusters) of the thresholded mask.
    int *clustered_map = find_clusters_3D(indexMatchingData, dim_x, dim_y, dim_z, n, &num_clusters);
    delete[] indexMatchingData;

    // extent_map[j] holds the size of the cluster that voxel j belongs to.
    int *extent_map = new int[n];
    for (j = 0; j < n; ++j) {
        extent_map[j] = 0;
    }
    for (i = 1; i <= num_clusters; ++i) {
        numOfElementsMatching = 0;
        for (j = 0; j < n; ++j) {
            if (clustered_map[j] == i) {
                numOfElementsMatching++;
            }
        }
        for (j = 0; j < n; ++j) {
            if (clustered_map[j] == i) {
                extent_map[j] = numOfElementsMatching;
            }
        }
    }

    // This threshold's TFCE contribution per voxel: extent^E * h^H * dh.
    float *clustered_map_float = copyAndConvertIntVector(extent_map, n);
    apply_function(clustered_map_float, n, elevate, E);
    apply_function(clustered_map_float, n, multiply, pow(h, H));
    apply_function(clustered_map_float, n, multiply, dh);
    for (i = 0; i < n; ++i) {
        #pragma omp atomic
        toReturn[i] += clustered_map_float[i];
    }

    delete[] clustered_map_float;
    delete[] clustered_map;
    delete[] extent_map;
}
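// A sketch (not from the original source) of the outer loop that
// computeTfceIteration is written for: TFCE approximates an integral over
// cluster-forming thresholds, so the caller steps h from dh up to the map's
// maximum and lets each iteration add extent^E * h^H * dh into toReturn.
// The "#pragma omp atomic" above suggests the iterations are meant to run in
// parallel; computeTfce and h_max are illustrative names, not from the source.
#include <algorithm>

void computeTfce(float *map, int n, int dim_x, int dim_y, int dim_z,
                 float E, float H, float dh, float *toReturn)
{
    const float h_max = *std::max_element(map, map + n);
    std::fill(toReturn, toReturn + n, 0.0f);
    const int steps = static_cast<int>(h_max / dh);
    #pragma omp parallel for
    for (int step = 1; step <= steps; ++step) {
        computeTfceIteration(step * dh, map, n, dim_x, dim_y, dim_z,
                             E, H, dh, toReturn);
    }
}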
ZEND_API void zend_stack_apply_with_argument(zend_stack *stack, int type, int (*apply_function)(void *element, void *arg), void *arg)
{
    int i;

    switch (type) {
        case ZEND_STACK_APPLY_TOPDOWN:
            for (i = stack->top - 1; i >= 0; i--) {
                if (apply_function(stack->elements[i], arg)) {
                    break;
                }
            }
            break;
        case ZEND_STACK_APPLY_BOTTOMUP:
            for (i = 0; i < stack->top; i++) {
                if (apply_function(stack->elements[i], arg)) {
                    break;
                }
            }
            break;
    }
}
ZEND_API void zend_stack_apply(zend_stack *stack, int type, int (*apply_function)(void *element))
{
    int i;

    switch (type) {
        case ZEND_STACK_APPLY_TOPDOWN:
            for (i = stack->top - 1; i >= 0; i--) {
                if (apply_function(ZEND_STACK_ELEMENT(stack, i))) {
                    break;
                }
            }
            break;
        case ZEND_STACK_APPLY_BOTTOMUP:
            for (i = 0; i < stack->top; i++) {
                if (apply_function(ZEND_STACK_ELEMENT(stack, i))) {
                    break;
                }
            }
            break;
    }
}
int sapi_stack_apply_with_argument_stop_if_http_error(sapi_stack *stack, int type, int (*apply_function)(void *element, void *arg), void *arg)
{
    int i;
    int ret = DECLINED;

    switch (type) {
        case ZEND_STACK_APPLY_TOPDOWN:
            for (i = stack->top - 1; i >= 0; i--) {
                /* A positive return value is an HTTP error code: stop the walk
                 * and report it to the caller. */
                if ((ret = apply_function(stack->elements[i], arg)) > 0) {
                    break;
                }
            }
            break;
        case ZEND_STACK_APPLY_BOTTOMUP:
            for (i = 0; i < stack->top; i++) {
                if ((ret = apply_function(stack->elements[i], arg)) > 0) {
                    break;
                }
            }
            break;
    }
    return ret;
}
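/*
 * A minimal, self-contained sketch (not from the original source) of the
 * callback contract the stack-apply helpers above share. The demo_stack
 * stand-in mirrors only the two fields those helpers dereference (top and
 * elements); the real sapi_stack/zend_stack types live in the PHP headers,
 * and print_hook is hypothetical, for illustration only.
 */
#include <stdio.h>

struct demo_stack {
    int top;            /* number of elements currently on the stack */
    void *elements[8];  /* stack slots, bottom first */
};

/* Shaped like the apply_function parameter: return 0 to keep walking, or a
 * positive value (e.g. an HTTP status) to make the early-exit and
 * *_stop_if_http_error variants stop. */
static int print_hook(void *element, void *arg)
{
    printf("%s: %s\n", (const char *)arg, (const char *)element);
    return 0;
}

int main(void)
{
    struct demo_stack stack = { 2, { (void *)"first", (void *)"second" } };
    int i;
    /* ZEND_STACK_APPLY_TOPDOWN order: newest element first. */
    for (i = stack.top - 1; i >= 0; i--) {
        print_hook(stack.elements[i], (void *)"topdown");
    }
    return 0;
}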
void Neural_network::train(const std::vector<double>& inputs, const std::vector<double>& targets)
{
    // Feed forward
    auto inputs_mat = Matrix::from_vector(inputs);
    auto hidden_mat = Matrix::multiply(input_to_hidden_weights, inputs_mat);
    if (is_bias_on)
        hidden_mat.add(bias_hidden);
    hidden_mat.apply_function(activation_function_hid);

    auto output_mat = Matrix::multiply(hidden_to_output_weights, hidden_mat);
    if (is_bias_on)
        output_mat.add(bias_output);
    output_mat.apply_function(activation_function_out);

    auto targets_mat = Matrix::from_vector(targets); // Convert goals to a matrix of targets

    // Output layer errors
    auto output_errors = Matrix::subtract(targets_mat, output_mat);

    // Accumulate MSE
    current_MSE += Matrix::compute_MSE(output_errors);

    // Gradient - output
    auto output_gradients = Matrix::apply_function(output_mat, activation_function_out_derivative);
    output_gradients.hadamard_product(output_errors);
    output_gradients.multiply(learning_rate);

    // Deltas - output layer
    auto hidden_transposed = Matrix::transpose(hidden_mat);
    auto hidden_to_output_weights_deltas = Matrix::multiply(output_gradients, hidden_transposed);
    auto bias_output_deltas = output_gradients; // In case of biases the deltas are just the gradients.

    // Hidden layer errors: backpropagate through the weights *before* they
    // are updated, so the error signal matches the forward pass.
    auto hidden_to_output_weights_transposed = Matrix::transpose(hidden_to_output_weights);
    auto hidden_errors = Matrix::multiply(hidden_to_output_weights_transposed, output_errors);

    // Hidden_to_output_weights tweaking (with momentum)
    hidden_to_output_weights_previous_deltas.multiply(momentum_coefficient);
    hidden_to_output_weights_deltas.add(hidden_to_output_weights_previous_deltas);
    hidden_to_output_weights_previous_deltas = hidden_to_output_weights_deltas;
    hidden_to_output_weights.add(hidden_to_output_weights_deltas);

    // Output bias tweaking
    if (is_bias_on) {
        bias_output_previous_deltas.multiply(momentum_coefficient);
        bias_output_deltas.add(bias_output_previous_deltas);
        bias_output_previous_deltas = bias_output_deltas;
        bias_output.add(bias_output_deltas); // The actual tweaking.
    }

    // Gradient - hidden
    auto hidden_gradients = Matrix::apply_function(hidden_mat, activation_function_hid_derivative);
    hidden_gradients.hadamard_product(hidden_errors);
    hidden_gradients.multiply(learning_rate);

    // Deltas - hidden layer
    auto inputs_transposed = Matrix::transpose(inputs_mat);
    auto input_to_hidden_weights_deltas = Matrix::multiply(hidden_gradients, inputs_transposed);
    auto bias_hidden_deltas = hidden_gradients; // In case of biases the deltas are just the gradients.

    // Input_to_hidden_weights tweaking (with momentum)
    input_to_hidden_weights_previous_deltas.multiply(momentum_coefficient);
    input_to_hidden_weights_deltas.add(input_to_hidden_weights_previous_deltas);
    input_to_hidden_weights_previous_deltas = input_to_hidden_weights_deltas;
    input_to_hidden_weights.add(input_to_hidden_weights_deltas);

    // Hidden bias tweaking
    if (is_bias_on) {
        bias_hidden_previous_deltas.multiply(momentum_coefficient);
        bias_hidden_deltas.add(bias_hidden_previous_deltas);
        bias_hidden_previous_deltas = bias_hidden_deltas;
        bias_hidden.add(bias_hidden_deltas); // The actual tweaking.
    }
}
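// A sketch of a training loop for the class above, learning XOR. The
// Neural_network constructor is not shown in this excerpt, so the network is
// taken by reference here rather than constructed; train_xor and the epoch
// count are illustrative, not from the source.
#include <vector>

void train_xor(Neural_network &nn)
{
    const std::vector<std::vector<double>> xs = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
    const std::vector<std::vector<double>> ys = {{0}, {1}, {1}, {0}};
    // Repeated calls to train() accumulate current_MSE, and the momentum
    // terms (..._previous_deltas) carry over from one call to the next.
    for (int epoch = 0; epoch < 10000; ++epoch) {
        for (size_t i = 0; i < xs.size(); ++i) {
            nn.train(xs[i], ys[i]);
        }
    }
}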
Status parse(const Token *tokens, Stack **operands, Stack **operators, Stack **functions)
{
    Status status = OK;
    const Token *token, *previous, *next;

    for (token = tokens, previous = &NO_TOKEN, next = token + 1;
         token->type != TOKEN_NONE;
         previous = token, token = next++) {
        switch (token->type) {
            case TOKEN_OPEN_PARENTHESIS: {
                // Implicit multiplication: "(2)(2)".
                if (previous->type == TOKEN_CLOSE_PARENTHESIS) {
                    status = push_multiplication(operands, operators);
                }
                stack_push(operators, get_operator('(', OPERATOR_OTHER));
                break;
            }
            case TOKEN_CLOSE_PARENTHESIS: {
                // Apply operators until the previous open parenthesis is found.
                bool found_parenthesis = false;
                while (*operators && status == OK && !found_parenthesis) {
                    const Operator *operator = stack_pop(operators);
                    if (operator->symbol == '(') {
                        found_parenthesis = true;
                    } else {
                        status = apply_operator(operator, operands);
                    }
                }
                if (!found_parenthesis) {
                    status = ERROR_CLOSE_PARENTHESIS;
                } else if (*functions) {
                    status = apply_function(stack_pop(functions), operands);
                }
                break;
            }
            case TOKEN_OPERATOR: {
                status = push_operator(
                    get_operator(*token->value, get_arity(*token->value, previous)),
                    operands, operators);
                break;
            }
            case TOKEN_NUMBER: {
                if (previous->type == TOKEN_CLOSE_PARENTHESIS ||
                    previous->type == TOKEN_NUMBER ||
                    previous->type == TOKEN_IDENTIFIER) {
                    status = ERROR_SYNTAX;
                } else {
                    status = push_number(token->value, operands);

                    // Implicit multiplication: "2(2)" or "2a".
                    if (next->type == TOKEN_OPEN_PARENTHESIS ||
                        next->type == TOKEN_IDENTIFIER) {
                        status = push_multiplication(operands, operators);
                    }
                }
                break;
            }
            case TOKEN_IDENTIFIER: {
                // The identifier could be either a constant or function.
                status = push_constant(token->value, operands);
                if (status == ERROR_UNDEFINED_CONSTANT &&
                    next->type == TOKEN_OPEN_PARENTHESIS) {
                    stack_push(functions, token->value);
                    status = OK;
                } else if (next->type == TOKEN_OPEN_PARENTHESIS ||
                           next->type == TOKEN_IDENTIFIER) {
                    // Implicit multiplication: "a(2)" or "a b".
                    status = push_multiplication(operands, operators);
                }
                break;
            }
            default: {
                status = ERROR_UNRECOGNIZED;
            }
        }
        if (status != OK) {
            return status;
        }
    }

    // Apply all remaining operators.
    while (*operators && status == OK) {
        const Operator *operator = stack_pop(operators);
        if (operator->symbol == '(') {
            status = ERROR_OPEN_PARENTHESIS;
        } else {
            status = apply_operator(operator, operands);
        }
    }
    return status;
}
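/*
 * A sketch (not from the original source) of the calling convention that
 * parse() implies: a TOKEN_NONE-terminated token array plus three stacks
 * that start empty. The tokenize() helper named here is an assumption --
 * it is not shown in this excerpt -- and evaluate() is illustrative only.
 */
Status evaluate(const char *expression)
{
    Stack *operands = NULL, *operators = NULL, *functions = NULL;
    const Token *tokens = tokenize(expression); /* hypothetical tokenizer */
    Status status = parse(tokens, &operands, &operators, &functions);
    /* On OK, the expression's value is left on top of the operand stack. */
    return status;
}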