// encode the frame header: copy user data, append version/length/scheme
// fields, run the packetizer (CRC + FEC), and scramble the result
//  _q      : gmsk frame generator object
//  _header : user-supplied header bytes, [size: GMSKFRAME_H_USER x 1]
void gmskframegen_encode_header(gmskframegen _q, unsigned char * _header)
{
    // first 'n' bytes user data
    memmove(_q->header_dec, _header, GMSKFRAME_H_USER);

    unsigned int n = GMSKFRAME_H_USER;

    // first byte is for expansion/version validation
    _q->header_dec[n+0] = GMSKFRAME_VERSION;

    // add payload length (16 bits, big-endian)
    _q->header_dec[n+1] = (_q->dec_msg_len >> 8) & 0xff;
    _q->header_dec[n+2] = (_q->dec_msg_len     ) & 0xff;

    // add CRC, forward error-correction schemes
    // NOTE: unlike the ofdmflexframe header there is no modulation-scheme
    // byte here, so these fields land at [n+3]/[n+4] (the original comment
    // said [n+4]/[n+5], which matched the OFDM variant, not this code):
    //  CRC  : most-significant  3 bits of [n+3]
    //  fec0 : least-significant 5 bits of [n+3]
    //  fec1 : least-significant 5 bits of [n+4]
    _q->header_dec[n+3]  = (_q->check & 0x07) << 5;
    _q->header_dec[n+3] |= (_q->fec0) & 0x1f;
    _q->header_dec[n+4]  = (_q->fec1) & 0x1f;

    // run packet encoder
    packetizer_encode(_q->p_header, _q->header_dec, _q->header_enc);

    // scramble header
    scramble_data(_q->header_enc, GMSKFRAME_H_ENC);

#if 0
    printf(" header_enc :");
    unsigned int i;
    for (i=0; i<GMSKFRAME_H_ENC; i++)
        printf(" %.2X", _q->header_enc[i]);
    printf("\n");
#endif
}
// round-trip scramble/unscramble check (helper to keep code base small)
//  _n : buffer length in bytes
void liquid_scramble_test(unsigned int _n)
{
    unsigned char buf_orig[_n];     // input data
    unsigned char buf_scram[_n];    // scrambled data
    unsigned char buf_restore[_n];  // unscrambled data

    // initialize input to all zeros
    memset(buf_orig, 0x00, _n);

    // scramble a copy of the input
    memmove(buf_scram, buf_orig, _n);
    scramble_data(buf_scram, _n);

    // unscramble the result
    memmove(buf_restore, buf_scram, _n);
    unscramble_data(buf_restore, _n);

    // round trip must reproduce the original data exactly
    CONTEND_SAME_DATA(buf_orig, buf_restore, _n*sizeof(unsigned char));

    // scrambled output of an all-zero input must still look random
    float H = liquid_scramble_test_entropy(buf_scram, _n);
    CONTEND_EXPRESSION( H > 0.8f );
}
// test unscrambling of soft bits (helper function to keep code base small)
//  _n : message length in bytes
void liquid_scramble_soft_test(unsigned int _n)
{
    unsigned char tx[_n];        // original message
    unsigned char tx_scram[_n];  // scrambled message (hard bits)
    unsigned char soft[8*_n];    // scrambled message (soft bits)
    unsigned char rx[_n];        // recovered message
    unsigned int i;

    // generate random input message
    for (i=0; i<_n; i++)
        tx[i] = rand() & 0xff;

    // scramble a copy of the input
    memmove(tx_scram, tx, _n);
    scramble_data(tx_scram, _n);

    // expand each scrambled byte into 8 soft bits
    for (i=0; i<_n; i++)
        liquid_unpack_soft_bits(tx_scram[i], 8, &soft[8*i]);

    // unscramble in the soft-bit domain
    unscramble_data_soft(soft, _n);

    // pack soft bits back into bytes
    for (i=0; i<_n; i++) {
        unsigned int sym;
        liquid_pack_soft_bits(&soft[8*i], 8, &sym);
        rx[i] = sym;
    }

    // recovered message must match the original
    CONTEND_SAME_DATA(tx, rx, _n);
}
// encode header void ofdmflexframegen_encode_header(ofdmflexframegen _q) { // first 'n' bytes user data unsigned int n = OFDMFLEXFRAME_H_USER; // first byte is for expansion/version validation _q->header[n+0] = OFDMFLEXFRAME_VERSION; // add payload length _q->header[n+1] = (_q->payload_dec_len >> 8) & 0xff; _q->header[n+2] = (_q->payload_dec_len ) & 0xff; // add modulation scheme/depth (pack into single byte) _q->header[n+3] = _q->props.mod_scheme; // add CRC, forward error-correction schemes // CRC : most-significant 3 bits of [n+4] // fec0 : least-significant 5 bits of [n+4] // fec1 : least-significant 5 bits of [n+5] _q->header[n+4] = (_q->props.check & 0x07) << 5; _q->header[n+4] |= (_q->props.fec0) & 0x1f; _q->header[n+5] = (_q->props.fec1) & 0x1f; // run packet encoder packetizer_encode(_q->p_header, _q->header, _q->header_enc); // scramble header scramble_data(_q->header_enc, OFDMFLEXFRAME_H_ENC); #if 0 // print header (decoded) unsigned int i; printf("header tx (dec) : "); for (i=0; i<OFDMFLEXFRAME_H_DEC; i++) printf("%.2X ", _q->header[i]); printf("\n"); // print header (encoded) printf("header tx (enc) : "); for (i=0; i<OFDMFLEXFRAME_H_ENC; i++) printf("%.2X ", _q->header_enc[i]); printf("\n"); #endif }
// ------------------- MAIN ----------------- // // ------------------------------------------ // int main(){ srand((unsigned int)time(NULL)); Vector* training_data[MAX_INPUT_LENGHT]; Vector* teaching_data[MAX_INPUT_LENGHT]; for (int i = 0; i < MAX_INPUT_LENGHT; i++) { training_data[i] = new_vec(DIMENSION_INPUT+1); teaching_data[i] = new_vec(DIMENSION_OUTPUT); } size_t TRAINING_SET_SIZE = 0; TRAINING_SET_SIZE = read_input(training_data, teaching_data); // in_layer, out_layer, hid_layer_count, hid_layers Network* network = new_network(DIMENSION_INPUT, DIMENSION_OUTPUT, 2, 4, 4); // print_network(network); Vector*** best_weights = malloc((network->hidden_layers_count+1) * sizeof(Vector**)); for (size_t layer = 0; layer < network->hidden_layers_count; layer++) { best_weights[layer] = malloc(network->hidden_layers[layer]->size * sizeof(Vector*)); for (size_t neuron_id = 0; neuron_id < network->hidden_layers[layer]->size; neuron_id++) { best_weights[layer][neuron_id] = new_vec(network->hidden_layers[layer]->neurons[neuron_id]->weights->length); } } best_weights[network->hidden_layers_count] = malloc(network->output_layer->size * sizeof(Vector*)); for (size_t neuron_id = 0; neuron_id < network->output_layer->size; neuron_id++) { best_weights[network->hidden_layers_count][neuron_id] = new_vec(network->output_layer->neurons[neuron_id]->weights->length); } time_t time_at_beginning = time(0); double total_error_old = FLOAT_MAX; double total_error = 1.0; double minimum_error_achieved = FLOAT_MAX; double epsilon = 0.0001; size_t epoch_count = 0; while ((time(0) - time_at_beginning) < 30 && (total_error = error_total(network, training_data, teaching_data, TRAINING_SET_SIZE)) > epsilon) { if (minimum_error_achieved > total_error) { minimum_error_achieved = total_error; dump_weights(network, best_weights); // print_detailed_layer(network->hidden_layers[1]); } for (size_t i = 0; i < TRAINING_SET_SIZE; i++) { train_network_with_backprop(network, training_data[i], teaching_data[i]); 
} if (epoch_count % 1000 == 0) { // printf("Epochs count: %ld\n",epoch_count); if (fabs(total_error - total_error_old) < 0.001) { // printf("Shaking Weights!\n"); shake_weights(network); } total_error_old = total_error; // printf("Total error: %.15lf\n", total_error); } update_learning_rate(network, ++epoch_count); scramble_data(training_data, teaching_data, TRAINING_SET_SIZE); } // printf("Network training finished with a total error: %.15lf\n", total_error); // printf("Network training achieved a minimum total error: %.15lf\n", minimum_error_achieved); // print_detailed_layer(network->hidden_layers[1]); load_weights(network, best_weights); // print_detailed_layer(network->input_layer); // print_detailed_layer(network->hidden_layers[0]); // print_detailed_layer(network->hidden_layers[1]); // print_detailed_layer(network->output_layer); test_network(network); for (size_t layer = 0; layer < network->hidden_layers_count; layer++) { for (size_t neuron_id = 0; neuron_id < network->hidden_layers[layer]->size; neuron_id++) { delete_vec(best_weights[layer][neuron_id]); } } for (size_t neuron_id = 0; neuron_id < network->output_layer->size; neuron_id++) { delete_vec(best_weights[network->hidden_layers_count][neuron_id]); } delete_network(network); for (int i = 0; i < MAX_INPUT_LENGHT; i++) { delete_vec(training_data[i]); delete_vec(teaching_data[i]); } return EXIT_SUCCESS; }