/* Encrypt one 16-byte XTS block: C = E_K1(P xor T) xor T, then advance
   the tweak T by one LFSR step (xts_mult_x, GF(2^128) doubling).
   @param P    [in]     plaintext block (16 bytes)
   @param C    [out]    ciphertext block (16 bytes; also used as scratch)
   @param T    [in/out] current tweak; stepped for the next block on success
   @param xts           XTS state holding the cipher index and key1
   @return CRYPT_OK on success, else the cipher's error code */
static int tweak_crypt(const unsigned char *P, unsigned char *C, unsigned char *T, symmetric_xts *xts)
{
   unsigned long i;
   int err;

   /* merge the tweak into the plaintext */
#ifdef LTC_FAST
   for (i = 0; i < 16; i += sizeof(LTC_FAST_TYPE)) {
      *((LTC_FAST_TYPE *)&C[i]) = *((LTC_FAST_TYPE *)&P[i]) ^ *((LTC_FAST_TYPE *)&T[i]);
   }
#else
   for (i = 0; i < 16; i++) {
      C[i] = P[i] ^ T[i];
   }
#endif

   /* ECB-encrypt in place with key1 */
   err = cipher_descriptor[xts->cipher].ecb_encrypt(C, C, &xts->key1);
   if (err != CRYPT_OK) {
      return err;
   }

   /* fold the tweak back into the ciphertext */
#ifdef LTC_FAST
   for (i = 0; i < 16; i += sizeof(LTC_FAST_TYPE)) {
      *((LTC_FAST_TYPE *)&C[i]) ^= *((LTC_FAST_TYPE *)&T[i]);
   }
#else
   for (i = 0; i < 16; i++) {
      C[i] ^= T[i];
   }
#endif

   /* LFSR the tweak for the next block */
   xts_mult_x(T);
   return CRYPT_OK;
}
/* Decrypt one 16-byte XTS block: P = D_K1(C xor T) xor T, then advance the
   tweak T by one LFSR step.
   @param C    [in]     ciphertext block (16 bytes)
   @param P    [out]    plaintext block (16 bytes; also used as scratch)
   @param T    [in/out] current tweak; always stepped before returning
   @param xts           XTS state holding the cipher index and key1
   @return the ecb_decrypt status (CRYPT_OK on success) */
static int _tweak_uncrypt(const unsigned char *C, unsigned char *P, unsigned char *T, symmetric_xts *xts)
{
   int err;
   unsigned long i;

   /* merge the tweak into the ciphertext */
#ifdef LTC_FAST
   for (i = 0; i < 16; i += sizeof(LTC_FAST_TYPE)) {
      *(LTC_FAST_TYPE_PTR_CAST(&P[i])) = *(LTC_FAST_TYPE_PTR_CAST(&C[i])) ^ *(LTC_FAST_TYPE_PTR_CAST(&T[i]));
   }
#else
   for (i = 0; i < 16; i++) {
      P[i] = C[i] ^ T[i];
   }
#endif

   /* ECB-decrypt in place; the status is deliberately held until after the
      unconditional post-processing below, matching the original flow */
   err = cipher_descriptor[xts->cipher].ecb_decrypt(P, P, &xts->key1);

   /* fold the tweak back out of the plaintext */
#ifdef LTC_FAST
   for (i = 0; i < 16; i += sizeof(LTC_FAST_TYPE)) {
      *(LTC_FAST_TYPE_PTR_CAST(&P[i])) ^= *(LTC_FAST_TYPE_PTR_CAST(&T[i]));
   }
#else
   for (i = 0; i < 16; i++) {
      P[i] ^= T[i];
   }
#endif

   /* LFSR the tweak */
   xts_mult_x(T);
   return err;
}
/* XTS single-block encrypt adapted from libtomcrypt's tweak_crypt to the
   Gladman-style AES interface: instead of a symmetric_xts* and the cipher
   descriptor's ecb_encrypt, the caller supplies an opaque cipher context and
   an encrypt callback (the original descriptor-based call is replaced by
   MethodEncrypt, whose return is not consulted here).
   Computes C = MethodEncrypt(P xor T) xor T, then LFSR-steps the tweak T. */
static int tweak_crypt(
    void *context,
    ltc_gladman_encrypt MethodEncrypt,
    const unsigned char *P,
    unsigned char *C,
    unsigned char *T
    )
{
   unsigned long i;
   int err = CRYPT_OK;

   /* merge the tweak into the plaintext */
#ifdef LTC_FAST
   for (i = 0; i < 16; i += sizeof(LTC_FAST_TYPE)) {
      *((LTC_FAST_TYPE *)&C[i]) = *((LTC_FAST_TYPE *)&P[i]) ^ *((LTC_FAST_TYPE *)&T[i]);
   }
#else
   for (i = 0; i < 16; i++) {
      C[i] = P[i] ^ T[i];
   }
#endif

   /* encrypt in place through the caller-supplied callback */
   MethodEncrypt(context, C, C);

   /* fold the tweak back into the ciphertext */
#ifdef LTC_FAST
   for (i = 0; i < 16; i += sizeof(LTC_FAST_TYPE)) {
      *((LTC_FAST_TYPE *)&C[i]) ^= *((LTC_FAST_TYPE *)&T[i]);
   }
#else
   for (i = 0; i < 16; i++) {
      C[i] ^= T[i];
   }
#endif

   /* LFSR the tweak */
   xts_mult_x(T);
   return err;
}
/* AES-only XTS block decrypt (aesedp fast path): P = D(C xor T) xor T using
   two 64-bit XOR lanes per pass, then LFSR-step the tweak.
   NOTE(review): the 64-bit loads assume the buffers are suitably aligned for
   uint64_t access — confirm against callers.
   @return the aes_decrypt status, propagated after the tweak is unfolded */
static int tweak_uncrypt(const uint8_t *C, uint8_t *P, uint8_t *T, aesedp_decrypt_ctx *ctx)
{
	uint32_t j;
	uint32_t rc;

	/* merge the tweak into the ciphertext, 8 bytes at a time */
	for (j = 0; j < 16; j += sizeof(uint64_t)) {
		*((uint64_t *)&P[j]) = *((uint64_t *)&C[j]) ^ *((uint64_t *)&T[j]);
	}

	/* decrypt in place; status is returned after post-processing */
	rc = aes_decrypt(P, P, ctx);

	/* fold the tweak back out of the plaintext */
	for (j = 0; j < 16; j += sizeof(uint64_t)) {
		*((uint64_t *)&P[j]) ^= *((uint64_t *)&T[j]);
	}

	/* LFSR the tweak */
	xts_mult_x(T);
	return rc;
}
/* AES-only XTS block encrypt (aesedp fast path): C = E(P xor T) xor T using
   two 64-bit XOR lanes per pass, then LFSR-step the tweak on success.
   An aes_encrypt failure is reported as CRYPT_INVALID_KEYSIZE, matching the
   mapping used by the surrounding aesedp xts routines.
   NOTE(review): the 64-bit loads assume suitably aligned buffers — confirm. */
static int tweak_crypt(const uint8_t *P, uint8_t *C, uint8_t *T, aesedp_encrypt_ctx *ctx)
{
	uint32_t j;
	uint32_t rc;

	/* merge the tweak into the plaintext, 8 bytes at a time */
	for (j = 0; j < 16; j += sizeof(uint64_t)) {
		*((uint64_t *)&C[j]) = *((uint64_t *)&P[j]) ^ *((uint64_t *)&T[j]);
	}

	/* encrypt in place; bail without stepping the tweak on failure */
	rc = aes_encrypt(C, C, ctx);
	if (rc != CRYPT_OK) {
		return CRYPT_INVALID_KEYSIZE;
	}

	/* fold the tweak back into the ciphertext */
	for (j = 0; j < 16; j += sizeof(uint64_t)) {
		*((uint64_t *)&C[j]) ^= *((uint64_t *)&T[j]);
	}

	/* LFSR the tweak */
	xts_mult_x(T);
	return CRYPT_OK;
}
/**
   XTS Decryption
   @param ct     [in]  Ciphertext
   @param ptlen        Length of plaintext (and ciphertext) in bytes;
                       must be at least 16 (one full block)
   @param pt    [out]  Plaintext
   @param tweak [in]   The 128-bit encryption tweak (e.g. sector number)
   @param xts          The XTS structure
   Returns CRYPT_OK upon success
*/
int xts_decrypt(const unsigned char *ct, unsigned long ptlen, unsigned char *pt, unsigned char *tweak, symmetric_xts *xts)
{
   unsigned char PP[16], CC[16], T[16];
   unsigned long i, m, mo, lim;
   int err;

   /* check inputs */
   LTC_ARGCHK(pt != NULL);
   LTC_ARGCHK(ct != NULL);
   LTC_ARGCHK(tweak != NULL);
   LTC_ARGCHK(xts != NULL);

   /* check if valid */
   if ((err = cipher_is_valid(xts->cipher)) != CRYPT_OK) {
      return err;
   }

   /* get number of blocks: m full blocks, mo trailing bytes */
   m = ptlen >> 4;
   mo = ptlen & 15;

   /* must have at least one full block */
   if (m == 0) {
      return CRYPT_INVALID_ARG;
   }

   /* lim = number of blocks processed by the main loop; the last full block
      is held back when ciphertext stealing is needed (mo != 0) */
   if (mo == 0) {
      lim = m;
   } else {
      lim = m - 1;
   }

   if (cipher_descriptor[xts->cipher].accel_xts_decrypt && lim > 0) {
      /* use accelerated decryption for whole blocks */
      if ((err = cipher_descriptor[xts->cipher].accel_xts_decrypt(ct, pt, lim, tweak, &xts->key1, &xts->key2)) != CRYPT_OK) {
         return err;
      }
      ct += lim * 16;
      pt += lim * 16;

      /* tweak is encrypted on output of the accelerator, so just copy it */
      XMEMCPY(T, tweak, sizeof(T));
   } else {
      /* encrypt the tweak with key2 (XTS always *encrypts* the tweak,
         even when decrypting data) */
      if ((err = cipher_descriptor[xts->cipher].ecb_encrypt(tweak, T, &xts->key2)) != CRYPT_OK) {
         return err;
      }

      for (i = 0; i < lim; i++) {
         if ((err = _tweak_uncrypt(ct, pt, T, xts)) != CRYPT_OK) {
            return err;
         }
         ct += 16;
         pt += 16;
      }
   }

   /* if ptlen not divide 16 then do ciphertext stealing for the tail */
   if (mo > 0) {
      /* CC = tweak for the final (partial) position */
      XMEMCPY(CC, T, 16);
      xts_mult_x(CC);

      /* PP = tweak decrypt block m-1 */
      if ((err = _tweak_uncrypt(ct, PP, CC, xts)) != CRYPT_OK) {
         return err;
      }

      /* Pm = first ptlen % 16 bytes of PP; CC = stolen ciphertext + tail of PP */
      for (i = 0; i < mo; i++) {
         CC[i] = ct[16 + i];
         pt[16 + i] = PP[i];
      }
      for (; i < 16; i++) {
         CC[i] = PP[i];
      }

      /* Pm-1 = Tweak uncrypt CC */
      if ((err = _tweak_uncrypt(CC, pt, T, xts)) != CRYPT_OK) {
         return err;
      }
   }

   /* Decrypt the tweak back so the caller sees it restored on output */
   if ((err = cipher_descriptor[xts->cipher].ecb_decrypt(T, tweak, &xts->key2)) != CRYPT_OK) {
      return err;
   }

   return CRYPT_OK;
}
/**
   XTS decryption (aesedp AES-only variant).
   @param ct    [in]  Ciphertext
   @param ptlen       Length of plaintext (and ciphertext) in bytes;
                      must be at least 16 (one full block)
   @param pt   [out]  Plaintext
   @param tweak [in]  The 128-bit tweak (e.g. sector number)
   @param xts         The XTS key structure
   @return CRYPT_OK on success; 1 on a NULL argument (kept for caller
           compatibility); CRYPT_INVALID_ARG / CRYPT_INVALID_KEYSIZE or a
           tweak_uncrypt error otherwise
*/
int xts_decrypt(
    const uint8_t *ct,
    unsigned long ptlen,
    uint8_t *pt,
    const uint8_t *tweak,
    symmetric_xts *xts)
{
	aesedp_decrypt_ctx *decrypt_ctx = &xts->key1.decrypt;
	uint8_t PP[16], CC[16], T[16];
	uint32_t i, m, mo, lim;
	uint32_t err;

	/* check inputs */
	if ((pt == 0) || (ct == 0) || (tweak == 0) || (xts == 0)) {
		return 1;
	}

	/* get number of blocks: m full blocks, mo trailing bytes */
	m = ptlen >> 4;
	mo = ptlen & 15;

	/* must have at least one full block */
	if (m == 0) {
		return CRYPT_INVALID_ARG;
	}

	/* encrypt the tweak , yes - encrypt: XTS always encrypts the tweak
	   with key2, even on the decrypt path */
	if ((err = aes_encrypt(tweak, T, &xts->key2.encrypt)) != 0) {
		return CRYPT_INVALID_KEYSIZE;
	}

	/* for i = 0 to m-2 do; hold the last full block back when ciphertext
	   stealing is needed (mo != 0) */
	if (mo == 0) {
		lim = m;
	} else {
		lim = m - 1;
	}

	for (i = 0; i < lim; i++) {
		/* BUGFIX: the return value was previously ignored here (it is
		   checked in the stealing branch below, and by the generic
		   libtomcrypt implementation); a failed block decrypt would
		   silently yield garbage plaintext */
		if ((err = tweak_uncrypt(ct, pt, T, decrypt_ctx)) != CRYPT_OK) {
			return err;
		}
		ct += 16;
		pt += 16;
	}

	/* if ptlen not divide 16 then do ciphertext stealing for the tail */
	if (mo > 0) {
		/* CC = tweak for the final (partial) position */
		memcpy(CC, T, 16);
		xts_mult_x(CC);

		/* PP = tweak decrypt block m-1 */
		if ((err = tweak_uncrypt(ct, PP, CC, decrypt_ctx)) != CRYPT_OK) {
			return err;
		}

		/* Pm = first ptlen % 16 bytes of PP;
		   CC = stolen ciphertext bytes + tail of PP */
		for (i = 0; i < mo; i++) {
			CC[i] = ct[16 + i];
			pt[16 + i] = PP[i];
		}
		for (; i < 16; i++) {
			CC[i] = PP[i];
		}

		/* Pm-1 = Tweak uncrypt CC */
		if ((err = tweak_uncrypt(CC, pt, T, decrypt_ctx)) != CRYPT_OK) {
			return err;
		}
	}

	return CRYPT_OK;
}
*/
/**
   XTS decryption (variant with optional Linaro tweak-writeback fix).
   @param ct    [in]  Ciphertext
   @param ptlen       Length of plaintext (and ciphertext) in bytes;
                      must be at least 16 (one full block)
   @param pt   [out]  Plaintext
   @param tweak [in]  The 128-bit tweak (e.g. sector number); when
                      LTC_LINARO_FIX_XTS is defined it is non-const and the
                      decrypted tweak is written back on output
   @param xts         The XTS structure
   @return CRYPT_OK upon success
*/
int xts_decrypt(
    const unsigned char *ct,
    unsigned long ptlen,
    unsigned char *pt,
#ifdef LTC_LINARO_FIX_XTS
    unsigned char *tweak,
#else
    const unsigned char *tweak,
#endif
    symmetric_xts *xts)
{
   unsigned char PP[16], CC[16], T[16];
   unsigned long i, m, mo, lim;
   int err;

   /* check inputs */
   LTC_ARGCHK(pt != NULL);
   LTC_ARGCHK(ct != NULL);
   LTC_ARGCHK(tweak != NULL);
   LTC_ARGCHK(xts != NULL);

   /* check if valid */
   if ((err = cipher_is_valid(xts->cipher)) != CRYPT_OK) {
      return err;
   }

   /* get number of blocks: m full blocks, mo trailing bytes */
   m = ptlen >> 4;
   mo = ptlen & 15;

   /* must have at least one full block */
   if (m == 0) {
      return CRYPT_INVALID_ARG;
   }

   /* encrypt the tweak with key2 (XTS always *encrypts* the tweak,
      even when decrypting data) */
   if ((err = cipher_descriptor[xts->cipher].ecb_encrypt(tweak, T, &xts->key2)) != CRYPT_OK) {
      return err;
   }

   /* for i = 0 to m-2 do; hold the last full block back when ciphertext
      stealing is needed (mo != 0) */
   if (mo == 0) {
      lim = m;
   } else {
      lim = m - 1;
   }

   for (i = 0; i < lim; i++) {
      /* BUGFIX: the return value was previously ignored here (the stealing
         branch below does check it); a failed block decrypt would silently
         yield garbage plaintext */
      if ((err = tweak_uncrypt(ct, pt, T, xts)) != CRYPT_OK) {
         return err;
      }
      ct += 16;
      pt += 16;
   }

   /* if ptlen not divide 16 then do ciphertext stealing for the tail */
   if (mo > 0) {
      /* CC = tweak for the final (partial) position */
      XMEMCPY(CC, T, 16);
      xts_mult_x(CC);

      /* PP = tweak decrypt block m-1 */
      if ((err = tweak_uncrypt(ct, PP, CC, xts)) != CRYPT_OK) {
         return err;
      }

      /* Pm = first ptlen % 16 bytes of PP;
         CC = stolen ciphertext bytes + tail of PP */
      for (i = 0; i < mo; i++) {
         CC[i] = ct[16 + i];
         pt[16 + i] = PP[i];
      }
      for (; i < 16; i++) {
         CC[i] = PP[i];
      }

      /* Pm-1 = Tweak uncrypt CC */
      if ((err = tweak_uncrypt(CC, pt, T, xts)) != CRYPT_OK) {
         return err;
      }
   }

#ifdef LTC_LINARO_FIX_XTS
   /* Decrypt the tweak back so the caller sees the original value restored */
   if ((err = cipher_descriptor[xts->cipher].ecb_decrypt(T, tweak, &xts->key2)) != CRYPT_OK) {
      return err;
   }
#endif

   return CRYPT_OK;
}