Example 1
0
int ccmode_ctr_setctr(CC_UNUSED const struct ccmode_ctr *mode, ccctr_ctx *ctx, const void *ctr)
{
    /* Setting the pad offset to the block size marks the keystream pad as fully
       consumed, so the next crypt call regenerates it from the new counter. */
    CCMODE_CTR_KEY_PAD_OFFSET(ctx) = CCMODE_CTR_KEY_ECB(ctx)->block_size;
    CC_MEMCPY(CCMODE_CTR_KEY_CTR(ctx), ctr, CCMODE_CTR_KEY_ECB(ctx)->block_size);
    
    return 0;
}
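
The routine above resets a CTR context mid-stream: it copies the caller's counter into the context and sets the pad offset to the cipher's block size, so the next encryption call has to regenerate the keystream from the new counter before producing any output. Below is a minimal sketch of the same idea against a simplified context; toy_ctr_ctx, toy_ctr_setctr, and TOY_BLOCK_SIZE are hypothetical names for illustration, not part of the library shown above.

/* Illustrative sketch only: store the new counter and mark the keystream pad
   as fully consumed so the next crypt call regenerates it from that counter. */
#include <stdint.h>
#include <string.h>

#define TOY_BLOCK_SIZE 16

typedef struct {
    uint8_t ctr[TOY_BLOCK_SIZE];   /* current counter block (big endian) */
    uint8_t pad[TOY_BLOCK_SIZE];   /* keystream block: block cipher applied to ctr */
    size_t  pad_len;               /* number of pad bytes already consumed */
} toy_ctr_ctx;

int toy_ctr_setctr(toy_ctr_ctx *ctx, const void *ctr)
{
    /* pad_len == TOY_BLOCK_SIZE means "no unused keystream left", which forces
       the next crypt call to encrypt the new counter before emitting output. */
    ctx->pad_len = TOY_BLOCK_SIZE;
    memcpy(ctx->ctr, ctr, TOY_BLOCK_SIZE);
    return 0;
}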
Example 2
0
void ccmode_ctr_crypt(ccctr_ctx *key,
                      size_t nbytes, const void *in, void *out) {
    const struct ccmode_ecb *ecb = CCMODE_CTR_KEY_ECB(key);
    const ccecb_ctx *ecb_key = CCMODE_CTR_KEY_ECB_KEY(key);
    uint8_t *ctr = (uint8_t *)CCMODE_CTR_KEY_CTR(key);
    uint8_t *pad = (uint8_t *)CCMODE_CTR_KEY_PAD(key);
    cc_size pad_len = CCMODE_CTR_KEY_PAD_LEN(key);
    const uint8_t *pt = in;
    // The counter is 64 bits wide for ciphers with a block size of 64 bits or more;
    // this matches the assembly implementation.
    const size_t counter_size = CC_MIN(ecb->block_size, (typeof(ecb->block_size))8);
    uint8_t *ct = out;

    while (nbytes) {
        if (pad_len == ecb->block_size) {
            /* The previous keystream block has been fully consumed (or the
               counter was just set), so encrypt the counter to produce a
               fresh pad block. */
            ecb->ecb(ecb_key, 1, ctr, pad);
            pad_len = 0;

            /* increment the big endian counter */
            for (size_t x = ecb->block_size; x-- > (ecb->block_size-counter_size);) {
                ctr[x] = (ctr[x] + (unsigned char)1) & (unsigned char)255;
                if (ctr[x] != (unsigned char)0) {
                    break;
                }
            }

            /* Defensive check: nbytes cannot be zero here, since the enclosing
               while loop already tested it and it has not been modified since. */
            if (nbytes == 0) break;
        }

        /* TODO: Make sure this works.  If pt and ct aren't aligned this
         might not work right.  */
#if 0
        if (pad_len == 0 && nbytes >= ecb->block_size) {
            ccn_xor(ecb->block_size / CCN_UNIT_SIZE, ct, pt, pad);
            pad_len = ecb->block_size;
            pt += pad_len;
            ct += pad_len;
            nbytes -= pad_len;
        }

#endif
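        /* XOR the input with the keystream one byte at a time until either the
           input or the current pad block is exhausted. */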
        do {
            *ct++ = *pt++ ^ pad[pad_len++];
            --nbytes;
        } while ((nbytes > 0) && (pad_len < ecb->block_size));
    }
    CCMODE_CTR_KEY_PAD_LEN(key) = pad_len;
}
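
To make the bookkeeping in ccmode_ctr_crypt concrete, the sketch below mirrors the same structure (regenerate the pad when it is exhausted, increment the big-endian counter, then XOR byte by byte) against a stand-in block cipher so it compiles and runs on its own. Every toy_* name and TOY_BLOCK_SIZE are hypothetical, not the library's API; the stand-in cipher is a plain key XOR, which is not secure and exists only to exercise the control flow, and the sketch increments the whole counter block rather than capping the counter width at 64 bits as the routine above does.

/* Illustrative, self-contained CTR sketch with toy names. */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define TOY_BLOCK_SIZE 16

typedef struct {
    uint8_t key[TOY_BLOCK_SIZE];
    uint8_t ctr[TOY_BLOCK_SIZE];   /* current counter block (big endian) */
    uint8_t pad[TOY_BLOCK_SIZE];   /* keystream block derived from ctr */
    size_t  pad_len;               /* number of pad bytes already consumed */
} toy_ctr_ctx;

/* Stand-in for the ECB encryption call: NOT a real cipher, just key XOR input,
   used only so the control flow can be exercised without a crypto library. */
static void toy_ecb_encrypt(const toy_ctr_ctx *ctx, const uint8_t *in, uint8_t *out)
{
    for (size_t i = 0; i < TOY_BLOCK_SIZE; i++)
        out[i] = in[i] ^ ctx->key[i];
}

static void toy_ctr_crypt(toy_ctr_ctx *ctx, size_t nbytes, const void *in, void *out)
{
    const uint8_t *pt = in;
    uint8_t *ct = out;

    while (nbytes) {
        if (ctx->pad_len == TOY_BLOCK_SIZE) {
            /* Pad exhausted: encrypt the counter to get a fresh keystream block. */
            toy_ecb_encrypt(ctx, ctx->ctr, ctx->pad);
            ctx->pad_len = 0;

            /* Increment the big-endian counter, rightmost byte first. */
            for (size_t x = TOY_BLOCK_SIZE; x-- > 0;) {
                if (++ctx->ctr[x] != 0)
                    break;
            }
        }

        /* XOR input with keystream until the data or the pad block runs out. */
        do {
            *ct++ = *pt++ ^ ctx->pad[ctx->pad_len++];
            --nbytes;
        } while (nbytes > 0 && ctx->pad_len < TOY_BLOCK_SIZE);
    }
}

int main(void)
{
    toy_ctr_ctx enc = {0}, dec = {0};
    enc.pad_len = dec.pad_len = TOY_BLOCK_SIZE;   /* force pad generation on first use */
    memset(enc.key, 0x2a, sizeof(enc.key));
    memcpy(dec.key, enc.key, sizeof(dec.key));

    const char msg[] = "CTR keeps state across partial-block calls";
    uint8_t ciphertext[sizeof(msg)], recovered[sizeof(msg)];

    /* Two partial calls on the encrypt side: pad_len carries over between them. */
    toy_ctr_crypt(&enc, 5, msg, ciphertext);
    toy_ctr_crypt(&enc, sizeof(msg) - 5, msg + 5, ciphertext + 5);
    toy_ctr_crypt(&dec, sizeof(msg), ciphertext, recovered);

    printf("round trip %s\n", memcmp(msg, recovered, sizeof(msg)) == 0 ? "ok" : "FAILED");
    return 0;
}

The two partial calls on the encryption side illustrate why the routine above writes pad_len back into the context before returning: the unused keystream bytes and their offset have to survive across invocations so a message can be processed in arbitrary chunks.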