hd_private_key hd_private_key::generate_private_key(uint32_t i) const
{
    // Derivation is impossible from an invalid parent key.
    if (!valid_)
        return hd_private_key();

    // BIP32: hardened children hash 0x00 || k_par || i, normal children
    // hash K_par || i; the index is serialized big-endian in both cases.
    const auto payload = (first_hardened_key <= i) ?
        build_data({to_byte(0x00), k_, to_big_endian(i)}) :
        build_data({K_, to_big_endian(i)});

    // HMAC-SHA512 keyed by the chain code, split into IL (key tweak)
    // and IR (child chain code).
    const auto digest = split(hmac_sha512_hash(payload, c_));

    // The child key ki is (parse256(IL) + kpar) mod n; ec_add reports
    // the (astronomically unlikely) invalid tweak.
    ec_secret child = k_;
    if (!ec_add(child, digest.L))
        return hd_private_key();

    const hd_key_lineage next
    {
        lineage_.testnet,
        static_cast<uint8_t>(lineage_.depth + 1),
        fingerprint(),
        i
    };
    return hd_private_key(child, digest.R, next);
}
/*
 * Fetch the current triplet's record from the greylist database and
 * refresh it, creating or rebuilding the record as needed.
 *
 * db/txn: Berkeley DB handle and the enclosing transaction.  The lookup
 * key/value (dbkey/dbdata) and the decoded record (triplet_data) are
 * file-scope globals prepared by the caller.
 *
 * - DB_NOTFOUND: brand-new triplet -> stamp it and build a fresh record.
 * - any other DB error: log it and abort via longjmp (jmperr).
 * - found: expire the record if it has been idle too long, using a
 *   different idle limit for records that have already passed greylisting
 *   (pass_max_idle since last access) vs. still-blocked ones
 *   (bloc_max_idle since creation).
 */
static void get_grey_data(DB *db, DB_TXN *txn)
{
    int rc;

    rc = db->get(db, txn, &dbkey, &dbdata, 0);
    if (rc == DB_NOTFOUND)
    {
        touch_data();
        build_data();
    }
    else if (rc)
    {
        log_db_error("get failed", rc);
        jmperr("get failed");
    }
    else
    {
        time_t ref_time;
        double age_max;

        if (triplet_data.pass_count)
        {
            /* Already whitelisted: idle clock runs from last access. */
            ref_time = triplet_data.access_time;
            age_max = pass_max_idle;
        }
        else
        {
            /* Still greylisted: idle clock runs from creation. */
            ref_time = triplet_data.create_time;
            age_max = bloc_max_idle;
        }
        /* ref_time was captured BEFORE touch_data(); the difftime below
         * therefore compares the refreshed access_time ("now") against the
         * old reference.  NOTE(review): this assumes touch_data() updates
         * triplet_data.access_time to the current time — confirm, otherwise
         * the expiry test is always false. */
        touch_data();
        /* Expire IDLE records */
        if (difftime(triplet_data.access_time, ref_time) > age_max)
            build_data();
    }
}
std::string secret_to_wif(const ec_secret& secret, bool compressed) { auto version = to_byte(payment_address::wif_version); data_chunk data; if (compressed) data = build_data({version, secret, to_byte(0x01)}, checksum_size); else data = build_data({version, secret}, checksum_size); append_checksum(data); return encode_base58(data); }
/* Test driver: rebuild the shared input buffer and run test_main on it
 * ten times, printing each result.
 *
 * Fix: `void main(void)` is not a valid signature in standard C/C++ —
 * main must return int (ISO C 5.1.2.2.1). */
int main(void)
{
    for (int l = 0; l < 10; l++)
    {
        build_data();
        printf("%d\n", test_main(data));
    }
    return 0;
}
/* Test driver: build the input data once and execute run_test, with
 * progress messages before and after.
 *
 * Fix: `void main(void)` is not a valid signature in standard C/C++ —
 * main must return int (ISO C 5.1.2.2.1). */
int main(void)
{
    for (int l = 0; l < 1; l++)
    {
        build_data();
        printf("calling the run");
        run_test();
        printf("called over");
    }
    return 0;
}
/* Serialize the PSKC key KP as a <Key> element under KEYP.
 *
 * Attributes (Id, Algorithm) and child elements are emitted in the order
 * the PSKC (RFC 6030) schema requires: Issuer, AlgorithmParameters,
 * KeyProfileId, KeyReference, FriendlyName, Data, UserId, Policy.
 * Fields the key does not carry (NULL getters) are simply skipped.
 *
 * Fix: the original never checked xmlNewChild's return value before
 * using `key`, unlike every other libxml2 call in this function; a
 * failed allocation would have been passed on as a NULL node.
 *
 * Returns PSKC_OK on success, PSKC_XML_ERROR when a libxml2 call fails,
 * or the error code from a build_* helper. */
static int
build_key (pskc_key_t * kp, xmlNodePtr keyp)
{
  /* All optional string fields; each is NULL when unset. */
  const char *id = pskc_get_key_id (kp);
  const char *alg = pskc_get_key_algorithm (kp);
  const char *issuer = pskc_get_key_issuer (kp);
  const char *userid = pskc_get_key_userid (kp);
  const char *keyprofileid = pskc_get_key_profileid (kp);
  const char *keyreference = pskc_get_key_reference (kp);
  const char *friendlyname = pskc_get_key_friendlyname (kp);
  xmlNodePtr key;
  int rc;

  key = xmlNewChild (keyp, NULL, BAD_CAST "Key", NULL);
  if (key == NULL)
    return PSKC_XML_ERROR;

  if (id && xmlNewProp (key, BAD_CAST "Id", BAD_CAST id) == NULL)
    return PSKC_XML_ERROR;
  if (alg && xmlNewProp (key, BAD_CAST "Algorithm", BAD_CAST alg) == NULL)
    return PSKC_XML_ERROR;

  if (issuer && xmlNewTextChild (key, NULL, BAD_CAST "Issuer",
				 BAD_CAST issuer) == NULL)
    return PSKC_XML_ERROR;

  rc = build_algparm (kp, key);
  if (rc != PSKC_OK)
    return rc;

  if (keyprofileid && xmlNewTextChild (key, NULL, BAD_CAST "KeyProfileId",
				       BAD_CAST keyprofileid) == NULL)
    return PSKC_XML_ERROR;
  if (keyreference && xmlNewTextChild (key, NULL, BAD_CAST "KeyReference",
				       BAD_CAST keyreference) == NULL)
    return PSKC_XML_ERROR;
  if (friendlyname && xmlNewTextChild (key, NULL, BAD_CAST "FriendlyName",
				       BAD_CAST friendlyname) == NULL)
    return PSKC_XML_ERROR;

  rc = build_data (kp, key);
  if (rc != PSKC_OK)
    return rc;

  if (userid && xmlNewTextChild (key, NULL, BAD_CAST "UserId",
				 BAD_CAST userid) == NULL)
    return PSKC_XML_ERROR;

  rc = build_policy (kp, key);
  if (rc != PSKC_OK)
    return rc;

  return PSKC_OK;
}
static t_tree *build_elem(int op, t_stat tmp)
{
	t_data *payload;
	t_tree *leaf;

	/* Wrap the stat buffer for "." together with the option-derived
	 * comparator and printer, then seed a one-node tree with it. */
	payload = build_data(&tmp, ".", how_option_create(op), choose_print(op));
	leaf = ft_create_node_tree((void *)payload);
	return (ft_addnode(NULL, leaf, NULL, payload->cmp));
}
// Deterministically derive the index-th candidate signing nonce from the
// secret key and message hash using an HMAC-SHA256 DRBG, in the style of
// RFC 6979.  NOTE(review): this follows the RFC 6979 instantiation
// (K=0x00.., V=0x01.., two seeding rounds with 0x00/0x01 separators) but
// the retry step differs from the RFC's k-regeneration loop — confirm
// against the project's signing code before relying on exact RFC
// conformance.
ec_secret create_nonce(ec_secret secret, hash_digest hash, unsigned index)
{
    // Project-specific one-time initialization (opaque helper).
    init.init();

    // HMAC-DRBG initial state: K = 0x00 repeated, V = 0x01 repeated.
    hash_digest K
    {{
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
    }};
    hash_digest V
    {{
        0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
        0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
        0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
        0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01
    }};

    // Seed round 1: K = HMAC(K, V || 0x00 || secret || hash); V = HMAC(K, V).
    K = hmac_sha256_hash(build_data({V, to_byte(0x00), secret, hash}), K);
    V = hmac_sha256_hash(V, K);
    // Seed round 2: same with separator byte 0x01.
    K = hmac_sha256_hash(build_data({V, to_byte(0x01), secret, hash}), K);
    V = hmac_sha256_hash(V, K);

    // Generate candidates; the index-th one is returned, allowing the
    // caller to retry with index+1 if a candidate is unusable.
    while (true)
    {
        V = hmac_sha256_hash(V, K);
        if (0 == index)
            return V;
        --index;
        // Re-key between candidates: K = HMAC(K, V || 0x00); V = HMAC(K, V).
        K = hmac_sha256_hash(build_data({V, to_byte(0x00)}), K);
        V = hmac_sha256_hash(V, K);
    }
}
// Serialize this extended public key as a base58check string (BIP32
// "xpub"/"tpub" format): version prefix || depth || parent fingerprint ||
// child number || chain code || public key, plus a 4-byte checksum.
std::string hd_public_key::encoded() const
{
    // The 4-byte version prefix selects mainnet vs testnet encoding.
    auto prefix = mainnet_public_prefix;
    if (lineage_.testnet)
        prefix = testnet_public_prefix;

    auto data = build_data(
    {
        to_big_endian(prefix),
        to_byte(lineage_.depth),
        // NOTE(review): the fingerprint is serialized little-endian while
        // the other integers are big-endian — presumably this matches the
        // byte order in which fingerprint() packed it, so it round-trips;
        // confirm against the decoder before changing.
        to_little_endian(lineage_.parent_fingerprint),
        to_big_endian(lineage_.child_number),
        c_,
        K_
    }, checksum_size);

    append_checksum(data);
    return encode_base58(data);
}
static void b_t(t_both *b, t_tree **err, t_tree **elem, t_fprint *p)
{
	struct stat	st;
	t_data		*payload;
	t_tree		*leaf;

	if (lstat(b->s, &st) == -1)
	{
		/* lstat failed: record the offending path name in the error tree. */
		leaf = ft_create_node_tree((void *)ft_strdup(b->s));
		*err = ft_addnode(*err, leaf, NULL, cmp_err);
	}
	else
	{
		/* lstat succeeded: wrap the stat data and file it in the
		 * element tree using the caller-supplied comparator. */
		payload = build_data(&st, b->s, b->func, p);
		leaf = ft_create_node_tree((void *)payload);
		*elem = ft_addnode(*elem, leaf, NULL, b->func);
	}
}
// Create a BIP39 mnemonic word list encoding the given entropy.
//
// The entropy size must be a multiple of mnemonic_seed_multiple bytes;
// otherwise an empty list is returned.  A checksum of
// entropy_bits / entropy_bit_divisor bits — the leading bits of
// SHA256(entropy) — is appended, and the combined bit stream is split
// into bits_per_word-bit big-endian indices into the dictionary.
word_list create_mnemonic(data_slice entropy, const dictionary &lexicon)
{
    // Reject entropy sizes BIP39 does not define.
    if ((entropy.size() % mnemonic_seed_multiple) != 0)
        return word_list();

    const size_t entropy_bits = (entropy.size() * byte_bits);
    const size_t check_bits = (entropy_bits / entropy_bit_divisor);
    const size_t total_bits = (entropy_bits + check_bits);
    const size_t word_count = (total_bits / bits_per_word);

    BITCOIN_ASSERT((total_bits % bits_per_word) == 0);
    BITCOIN_ASSERT((word_count % mnemonic_word_multiple) == 0);

    // entropy || SHA256(entropy); only the first check_bits bits of the
    // hash are ever read by the loop below.
    const auto data = build_data({entropy, sha256_hash(entropy)});

    size_t bit = 0;
    word_list words;

    for (size_t word = 0; word < word_count; word++)
    {
        // Accumulate this word's dictionary index, MSB first.
        size_t position = 0;
        for (size_t loop = 0; loop < bits_per_word; loop++)
        {
            // Absolute offset of the next bit in the combined stream.
            bit = (word * bits_per_word + loop);
            position <<= 1;

            const auto byte = bit / byte_bits;

            // bip39_shift masks the bit within its containing byte.
            if ((data[byte] & bip39_shift(bit)) > 0)
                position++;
        }

        BITCOIN_ASSERT(position < dictionary_size);
        words.push_back(lexicon[position]);
    }

    // Sanity check: every consumed bit produced exactly one word.
    BITCOIN_ASSERT(words.size() == ((bit + 1) / bits_per_word));
    return words;
}
// Derive the i-th child public key (BIP32 CKDpub).
//
// Fix: the invalid-key guard returned hd_private_key() from a function
// whose return type is hd_public_key — inconsistent with the other two
// failure paths, which correctly return hd_public_key().  Now all failure
// paths return a default-constructed (invalid) hd_public_key.
hd_public_key hd_public_key::generate_public_key(uint32_t i) const
{
    // Cannot derive from an invalid parent key.
    if (!valid_)
        return hd_public_key();

    // Hardened children (i >= 2^31) require the private key; a public
    // parent cannot derive them (BIP32).
    if (first_hardened_key <= i)
        return hd_public_key();

    auto data = build_data({K_, to_big_endian(i)});
    const auto I = split(hmac_sha512_hash(data, c_));

    // The returned child key Ki is point(parse256(IL)) + Kpar.
    ec_point Ki = K_;
    if (!ec_add(Ki, I.L))
        return hd_public_key();

    hd_key_lineage lineage
    {
        lineage_.testnet,
        static_cast<uint8_t>(lineage_.depth + 1),
        fingerprint(),
        i
    };
    return hd_public_key(Ki, I.R, lineage);
}
// Parse command-line options and initialize the tool's global state:
// feature flags, logging sink, CRF alphabets, and the synth/test/eval
// corpora.  Returns false (without initializing anything further) when a
// required option is missing.
//
// NOTE(review): COLOR_ENABLED, FORCE_SCALE, SMOOTH, SCALE_ENERGY,
// PRINT_SCALE, REPORT_PROGRESS, VLOG, crf, baseline_crf, alphabet_* and
// corpus_* are file/namespace-scope globals defined elsewhere — confirm
// their declarations before relying on types stated here.
bool init_tool(int argc, const char** argv, Options* opts)
{
    *opts = Options::parse_options(argc, argv);
    if(!Options::has_required(*opts))
        return false;

    // Feature flags driven directly by the presence of CLI switches.
    COLOR_ENABLED = !opts->has_opt("no-color");
    FORCE_SCALE = opts->has_opt("force-scale");
    SMOOTH = opts->has_opt("smooth");
    SCALE_ENERGY = opts->has_opt("energy");
    PRINT_SCALE = opts->has_opt("print-scale");
    REPORT_PROGRESS = opts->has_opt("progress");

    // Verbose-log output file; defaults to "vlog.log".
    VLOG = std::ofstream(opts->get_opt<std::string>("vlog", "vlog.log"));

    // Both CRFs label over the synthesis alphabet.
    crf.label_alphabet = &alphabet_synth;
    baseline_crf.label_alphabet = &alphabet_synth;

    // Load corpora/alphabets from the configured data sources.
    build_data(*opts);

    // Preprocess, then optimize each alphabet and remap its corpus to the
    // optimized symbol ids (remap must follow optimize).
    pre_process(alphabet_synth, corpus_synth);
    pre_process(alphabet_test, corpus_test);
    alphabet_synth.optimize();
    remap(alphabet_synth, corpus_synth);
    alphabet_test.optimize();
    remap(alphabet_test, corpus_test);

    // Split the test corpus: the first test-corpus-size sequences
    // (default 10) stay as the test set; the remainder become eval data.
    auto testSize = opts->get_opt<unsigned>("test-corpus-size", 10);
    for(auto i = testSize; i < corpus_test.size(); i++)
        corpus_eval.add(corpus_test.input(i), corpus_test.label(i));
    corpus_test.set_max_size(testSize);

    INFO("Synth sequences = " << corpus_synth.size());
    INFO("Test sequences = " << corpus_test.size());
    INFO("Eval sequences = " << corpus_eval.size());

    return true;
}