/*
   Fill 'buf' with 'len' bytes of random data extracted from the entropy pool.

   Extraction scheme (per 64-byte chunk): SHA-512(pool window || counter/len seed),
   then the digest is whitened with AES-256/ECB under a key derived from key_pool.
   Blocks (busy-waits in 1 ms steps) until at least 256 reseeds have occurred.

   Returns: nonzero (1) on success, 0 if writing to 'buf' faulted
            (the copy is SEH-guarded, so a bad caller buffer is reported,
            not bugchecked).

   NOTE(review): relies on globals reseed_cnt, key_pool, rnd_key, rnd_pool,
   getrnd_cnt and serializes on rnd_mutex; RNG_POOL_SIZE is presumably a
   multiple of SHA512_DIGEST_SIZE for the wraparound check to hit — confirm.
*/
int rnd_get_bytes(u8 *buf, int len)
{
	sha512_ctx sha_ctx;
	u8         hval[SHA512_DIGEST_SIZE];
	int        c_len, idx, i;
	ext_seed   seed;
	int        fail;

	if (reseed_cnt < 256) {
		DbgMsg("RNG not have sufficient entropy (%d reseeds), collect it now\n", reseed_cnt);
	}
	/* if the RNG does not have sufficient entropy yet, collect it now */
	while (reseed_cnt < 256)
	{
		dc_delay(1); /* wait 1 millisecond */
		rnd_reseed_now();
	}
	wait_object_infinity(&rnd_mutex);

	/* derive the AES whitening key from the key pool */
	aes256_asm_set_key(key_pool, rnd_key);

	/* mix pool state before getting data from it */
	rnd_pool_mix();

	/* idx - current extraction position inside the pool */
	idx = 0; fail = 0;
	do
	{
		/* at most one SHA-512 digest worth of output per iteration */
		c_len = min(len, SHA512_DIGEST_SIZE);
		/* per-call seed: monotonically increasing counter + requested length */
		seed.seed1 = getrnd_cnt++;
		seed.seed2 = len;

		/* collect additional entropy before extracting a data block */
		rnd_reseed_now();

		sha512_init(&sha_ctx);
		sha512_hash(&sha_ctx, rnd_pool + idx, SHA512_DIGEST_SIZE);
		sha512_hash(&sha_ctx, pv(&seed), sizeof(seed));
		sha512_done(&sha_ctx, hval);

		/* encrypt the hash value with AES in ECB mode (output whitening) */
		for (i = 0; i < SHA512_DIGEST_SIZE; i += AES_BLOCK_SIZE) {
			aes256_asm_encrypt(hval + i, hval + i, rnd_key);
		}

		/* copy data to output; SEH-guarded because 'buf' may be
		   an untrusted/user-supplied address */
		__try {
			memcpy(buf, hval, c_len);
		}
		__except(EXCEPTION_EXECUTE_HANDLER) {
			fail = 1;
		}

		/* advance the extraction pointer */
		if ( (idx += SHA512_DIGEST_SIZE) == RNG_POOL_SIZE )
		{
			/* if all data in the pool has been extracted, mix the pool
			   to incorporate the new entropy added by the reseeds above */
			rnd_pool_mix();
			idx = 0;
		}

		/* collect additional entropy after extracting a data block */
		rnd_reseed_now();

		/* update the output pointer and the remaining length */
		buf += c_len; len -= c_len;
	} while ( (len != 0) && (fail == 0) );

	/* mix the pool after extraction to prevent "cold boot" attacks
	   against previously generated keys (backtracking resistance) */
	rnd_pool_mix();

	/* prevent leaks: wipe key material and intermediate state */
	zeroauto(rnd_key, sizeof(aes256_key));
	zeroauto(&sha_ctx, sizeof(sha_ctx));
	zeroauto(hval, sizeof(hval));
	zeroauto(&seed, sizeof(seed));

	KeReleaseMutex(&rnd_mutex, FALSE);

	return fail == 0;
}
int test_aes256() { char tmp[16]; aes256_key skey; int i; #ifndef SMALL_CODE u32 old_p; #endif #ifdef SMALL_CODE /* initialize AES tables */ aes256_gentab(); #else /* allow execute code from key buffer */ if (VirtualProtect(&skey, sizeof(skey), PAGE_EXECUTE_READWRITE, &old_p) == 0) { return 0; } #endif /* test basic assembler inmpementation */ for (i = 0; i < array_num(aes256_vectors); i++) { #ifdef SMALL_CODE aes256_set_key(aes256_vectors[i].key, &skey); aes256_encrypt(aes256_vectors[i].plaintext, tmp, &skey); #else aes256_asm_set_key(aes256_vectors[i].key, &skey); aes256_asm_encrypt(aes256_vectors[i].plaintext, tmp, &skey); #endif if (memcmp(aes256_vectors[i].ciphertext, tmp, sizeof(tmp)) != 0) { return 0; } #ifdef SMALL_CODE aes256_decrypt(aes256_vectors[i].ciphertext, tmp, &skey); #else aes256_asm_decrypt(aes256_vectors[i].ciphertext, tmp, &skey); #endif if (memcmp(aes256_vectors[i].plaintext, tmp, sizeof(tmp)) != 0) { return 0; } #if !defined(SMALL_CODE) || !defined(_M_X64) /* test AES with VIA Padlock API */ if (aes256_padlock_available() != 0) { #ifdef SMALL_CODE aes256_padlock_encrypt(aes256_vectors[i].plaintext, tmp, &skey); #else aes256_padlock_encrypt(aes256_vectors[i].plaintext, tmp, 1, &skey); #endif if (memcmp(aes256_vectors[i].ciphertext, tmp, sizeof(tmp)) != 0) { return 0; } #ifdef SMALL_CODE aes256_padlock_decrypt(aes256_vectors[i].ciphertext, tmp, &skey); #else aes256_padlock_decrypt(aes256_vectors[i].ciphertext, tmp, 1, &skey); #endif if (memcmp(aes256_vectors[i].plaintext, tmp, sizeof(tmp)) != 0) { return 0; } } #endif } return 1; }