EXPORT_SYM int ripemd160_digest(const hash_state *hs, uint8_t digest[RIPEMD160_DIGEST_SIZE])
{
    hash_state tmp;
    unsigned i;

    if (NULL == hs || NULL == digest)
        return ERR_NULL;

    tmp = *hs;

    /* Append the padding */
    tmp.buf[tmp.bufpos++] = 0x80;
    if (tmp.bufpos > 56) {
        tmp.bufpos = 64;
        ripemd160_compress(&tmp);
    }

    /* Append the length */
    STORE_U64_LITTLE(&tmp.buf[sizeof tmp.buf - 8], tmp.length);
    tmp.bufpos = 64;
    ripemd160_compress(&tmp);

    /* Copy the final state into the output buffer */
    assert(RIPEMD160_DIGEST_SIZE == sizeof tmp.h);
    for (i = 0; i < 5; i++)
        STORE_U32_LITTLE(digest + i * sizeof tmp.h[0], tmp.h[i]);

    return 0;
}
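/*
 * Illustrative sketch, not part of the original module: the finalization above
 * follows the usual Merkle-Damgaard scheme -- append a single 0x80 byte, zero-pad
 * the rest of the final 64-byte block (spilling into an extra block when the 0x80
 * lands at offset 56 or later), then store the message length in the last 8 bytes
 * as a 64-bit little-endian value. The helper below reproduces only that byte
 * layout for a message of msg_len bytes; the assumption that `length` holds a bit
 * count is taken from the RIPEMD-160 specification, everything else (names,
 * signature) is hypothetical.
 */
#include <stdint.h>
#include <string.h>

static size_t demo_ripemd160_final_blocks(uint64_t msg_len, uint8_t out[128])
{
    size_t used = (size_t)(msg_len % 64);   /* bytes already sitting in the buffer */
    size_t total = (used >= 56) ? 128 : 64; /* one or two final blocks */
    uint64_t bits = msg_len * 8;
    unsigned i;

    memset(out, 0, 128);
    out[used] = 0x80;                       /* mandatory padding marker */
    for (i = 0; i < 8; i++)                 /* 64-bit little-endian bit length */
        out[total - 8 + i] = (uint8_t)(bits >> (8 * i));
    return total;                           /* number of padded bytes produced */
}

/*
 * For example, demo_ripemd160_final_blocks(3, blk) returns 64, places 0x80 at
 * offset 3, and stores 0x18 (24 bits) at offset 56.
 */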
EXPORT_SYM int md4_digest(const hash_state *hs, uint8_t digest[16])
{
    uint8_t s[8];               /* little-endian encoding of the original bit length */
    uint32_t padlen;
    hash_state temp;
    unsigned i;
    uint64_t bitlen;
    static const uint8_t padding[64] = { 0x80 };    /* remaining bytes are zero */

    if (NULL == hs || NULL == digest)
        return ERR_NULL;

    temp = *hs;

    /* Save the length in bits before the padding updates it */
    bitlen = temp.bitlen;

    /* Pad so that, once the 8-byte length field is appended, the total is a multiple of 64 bytes */
    padlen = (hs->count < 56) ? 56 - hs->count : 120 - hs->count;
    md4_update(&temp, padding, padlen);

    /* Append the length in bits, little-endian */
    for (i = 0; i < 8; i++)
        s[i] = (uint8_t)(bitlen >> (i * 8));
    md4_update(&temp, s, 8);

    STORE_U32_LITTLE(&digest[0],  temp.A);
    STORE_U32_LITTLE(&digest[4],  temp.B);
    STORE_U32_LITTLE(&digest[8],  temp.C);
    STORE_U32_LITTLE(&digest[12], temp.D);

    return 0;
}
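/*
 * Illustrative sketch, not part of the original module: the padlen expression in
 * md4_digest pads the message so that, after the 8-byte length field, the total is
 * a multiple of 64 bytes, while always emitting at least one padding byte (the
 * 0x80). The standalone check below exercises that invariant for every possible
 * buffer fill level; it only assumes that `count` is the number of bytes currently
 * buffered, i.e. a value in [0, 63].
 */
#include <assert.h>
#include <stdint.h>

static void demo_check_md4_padlen(void)
{
    uint32_t count;

    for (count = 0; count < 64; count++) {
        uint32_t padlen = (count < 56) ? 56 - count : 120 - count;

        assert(padlen >= 1 && padlen <= 64);        /* at least the 0x80 byte */
        assert((count + padlen + 8) % 64 == 0);     /* length field completes a block */
    }
}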
static void _salsa20_block(unsigned rounds, uint32_t *input, uint8_t *output)
{
    uint32_t x0, x1, x2, x3, x4, x5, x6, x7;
    uint32_t x8, x9, x10, x11, x12, x13, x14, x15;
    unsigned i;

    x0  = input[0];  x1  = input[1];  x2  = input[2];  x3  = input[3];
    x4  = input[4];  x5  = input[5];  x6  = input[6];  x7  = input[7];
    x8  = input[8];  x9  = input[9];  x10 = input[10]; x11 = input[11];
    x12 = input[12]; x13 = input[13]; x14 = input[14]; x15 = input[15];

    for (i = rounds; i > 0; i -= 2) {
        /* Column round */
        x4  = XOR( x4, ROTL32( x0 + x12,  7));
        x8  = XOR( x8, ROTL32( x4 + x0,   9));
        x12 = XOR(x12, ROTL32( x8 + x4,  13));
        x0  = XOR( x0, ROTL32(x12 + x8,  18));
        x9  = XOR( x9, ROTL32( x5 + x1,   7));
        x13 = XOR(x13, ROTL32( x9 + x5,   9));
        x1  = XOR( x1, ROTL32(x13 + x9,  13));
        x5  = XOR( x5, ROTL32( x1 + x13, 18));
        x14 = XOR(x14, ROTL32(x10 + x6,   7));
        x2  = XOR( x2, ROTL32(x14 + x10,  9));
        x6  = XOR( x6, ROTL32( x2 + x14, 13));
        x10 = XOR(x10, ROTL32( x6 + x2,  18));
        x3  = XOR( x3, ROTL32(x15 + x11,  7));
        x7  = XOR( x7, ROTL32( x3 + x15,  9));
        x11 = XOR(x11, ROTL32( x7 + x3,  13));
        x15 = XOR(x15, ROTL32(x11 + x7,  18));

        /* Row round */
        x1  = XOR( x1, ROTL32( x0 + x3,   7));
        x2  = XOR( x2, ROTL32( x1 + x0,   9));
        x3  = XOR( x3, ROTL32( x2 + x1,  13));
        x0  = XOR( x0, ROTL32( x3 + x2,  18));
        x6  = XOR( x6, ROTL32( x5 + x4,   7));
        x7  = XOR( x7, ROTL32( x6 + x5,   9));
        x4  = XOR( x4, ROTL32( x7 + x6,  13));
        x5  = XOR( x5, ROTL32( x4 + x7,  18));
        x11 = XOR(x11, ROTL32(x10 + x9,   7));
        x8  = XOR( x8, ROTL32(x11 + x10,  9));
        x9  = XOR( x9, ROTL32( x8 + x11, 13));
        x10 = XOR(x10, ROTL32( x9 + x8,  18));
        x12 = XOR(x12, ROTL32(x15 + x14,  7));
        x13 = XOR(x13, ROTL32(x12 + x15,  9));
        x14 = XOR(x14, ROTL32(x13 + x12, 13));
        x15 = XOR(x15, ROTL32(x14 + x13, 18));
    }

    /* Add the input words back in and serialize the keystream block */
    x0  += input[0];  x1  += input[1];  x2  += input[2];  x3  += input[3];
    x4  += input[4];  x5  += input[5];  x6  += input[6];  x7  += input[7];
    x8  += input[8];  x9  += input[9];  x10 += input[10]; x11 += input[11];
    x12 += input[12]; x13 += input[13]; x14 += input[14]; x15 += input[15];

    STORE_U32_LITTLE(output +  0, x0);
    STORE_U32_LITTLE(output +  4, x1);
    STORE_U32_LITTLE(output +  8, x2);
    STORE_U32_LITTLE(output + 12, x3);
    STORE_U32_LITTLE(output + 16, x4);
    STORE_U32_LITTLE(output + 20, x5);
    STORE_U32_LITTLE(output + 24, x6);
    STORE_U32_LITTLE(output + 28, x7);
    STORE_U32_LITTLE(output + 32, x8);
    STORE_U32_LITTLE(output + 36, x9);
    STORE_U32_LITTLE(output + 40, x10);
    STORE_U32_LITTLE(output + 44, x11);
    STORE_U32_LITTLE(output + 48, x12);
    STORE_U32_LITTLE(output + 52, x13);
    STORE_U32_LITTLE(output + 56, x14);
    STORE_U32_LITTLE(output + 60, x15);

    /* Increment the 64-bit block counter (words 8 and 9) */
    input[8]++;
    if (input[8] == 0) {
        input[9]++;
        /* stopping at 2^70 bytes per nonce is the user's responsibility */
    }
}
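/*
 * Illustrative sketch, not part of the original module: each column and row round
 * above is four applications of the Salsa20 quarter-round, fully unrolled. The
 * helpers below spell that quarter-round out once, assuming ROTL32 is a 32-bit
 * left rotation and XOR is plain exclusive-or, which is how the macros are used in
 * _salsa20_block. For example, the first column round acts on (x0, x4, x8, x12)
 * and the first row round on (x0, x1, x2, x3).
 */
#include <stdint.h>

/* 32-bit left rotation; valid for shift counts 1..31 (the rounds use 7, 9, 13, 18) */
static uint32_t demo_rotl32(uint32_t v, unsigned c)
{
    return (v << c) | (v >> (32 - c));
}

static void demo_quarterround(uint32_t *a, uint32_t *b, uint32_t *c, uint32_t *d)
{
    *b ^= demo_rotl32(*a + *d,  7);
    *c ^= demo_rotl32(*b + *a,  9);
    *d ^= demo_rotl32(*c + *b, 13);
    *a ^= demo_rotl32(*d + *c, 18);
}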