Example no. 1
0
    /*
     * One backpropagation step for a 3-layer network (input/hidden/output).
     *
     * y          - target class index (stored as double; compared against the
     *              output-unit index, which is promoted to double)
     * rate_learn - learning rate for the gradient-descent weight updates
     *
     * Fix vs. original: the hidden-layer error must be computed from
     * weight_out BEFORE weight_out is updated. The original updated the
     * hidden->output weights first and then propagated the output error
     * through the already-modified weights, which is incorrect backprop.
     */
    void backPropagation(double y, double rate_learn)
    {
        /* Output-layer error: (output - target), target is one-hot at index y */
        for(int i=0; i<n_out; i++){
            if(i == y) error_out[i] = output_out[i] - 1.0;
            else error_out[i] = output_out[i];
        }

        /* Hidden-layer error: backpropagate through the CURRENT (pre-update)
         * hidden->output weights, then scale by the activation derivative. */
        for(int i=0; i<n_hid; i++){
            error_hid[i] = 0;
            for(int j=0; j<n_out; j++){
                error_hid[i] += weight_out[i][j] * error_out[j];
            }
            error_hid[i] *= d_sigmoid(input_hid[i]);
        }

        /* Gradient-descent update of hidden->output weights */
        for(int i=0; i<n_hid; i++){
            for(int j=0; j<n_out; j++){
                weight_out[i][j] -= rate_learn * error_out[j] * output_hid[i];
            }
        }

        /* Gradient-descent update of input->hidden weights */
        for(int i=0; i<n_in; i++){
            for(int j=0; j<n_hid; j++){
                weight_hid[i][j] -= rate_learn * error_hid[j] * output_in[i];
            }
        }
    }
Example no. 2
0
/*
 * Accumulates the error flowing back into `neuron` from its (up to 10)
 * outgoing dendrites, stores it in neuron->error, then adjusts every
 * outgoing dendrite weight and prints the new value.
 *
 * NOTE(review): the adjustment term applies d_sigmoid to the target's
 * *error* rather than its output, and no learning rate is used — this
 * looks unusual for backprop; behavior is preserved as-is, confirm
 * against the original author's intent.
 */
void calculate_weight(Neuron* neuron) {
	double accumulated = 0.0;

	/* Sum (downstream error * connection weight) over all live links. */
	for (int idx = 0; idx < 10; idx++) {
		Dendrite* link = neuron->outgoing_list[idx];
		if (!link) {
			continue;
		}
		accumulated += link->target->error * link->weight;
	}

	neuron->error = accumulated;

	/* Nudge each live connection weight and report it. */
	for (int idx = 0; idx < 10; idx++) {
		Dendrite* link = neuron->outgoing_list[idx];
		if (!link) {
			continue;
		}
		Neuron* post = link->target;
		link->weight += post->error * d_sigmoid(post->error) * neuron->output;
		printf("Weight: %f\n", link->weight);
	}
}