// // Flatten the valid iolist to the buffer of // appropriate size pointed to by ptr // uint8_t *iolist_flatten(term_t l, uint8_t *ptr) { if (is_nil(l)) return ptr; if (is_cons(l)) { do { uint32_t *term_data = peel_cons(l); term_t e = term_data[0]; if (is_int(e)) *ptr++ = int_value(e); else { assert(is_list(e) || (is_boxed(e) && is_binary(peel_boxed(e)))); ptr = iolist_flatten(e, ptr); } l = term_data[1]; if (is_boxed(l) && is_binary(peel_boxed(l))) return iolist_flatten(l, ptr); } while (is_cons(l)); assert(is_nil(l)); } else // is_binary() { bits_t bs, to; bits_get_real(peel_boxed(l), &bs); bits_init_buf(ptr, (bs.ends +7) /8, &to); ptr += (bs.ends - bs.starts) /8; bits_copy(&bs, &to); assert(bs.starts == bs.ends); } return ptr; }
//
// crypto:sha_update/2 — absorb Data into an SHA-1 context binary.
//
// regs[0] (Context) must be a binary of exactly sizeof(struct sha1_ctx)
// bytes; regs[1] (Data) must be a valid iolist or binary. Returns a new
// binary holding the updated context. Raises badarg on malformed input.
//
term_t cbif_sha_update2(proc_t *proc, term_t *regs)
{
	term_t Context = regs[0];
	term_t Data = regs[1];
	if (!is_boxed_binary(Context))
		badarg(Context);

	// the opaque context travels as a binary of the exact ctx size
	bits_t bs, dst;
	bits_get_real(peel_boxed(Context), &bs);
	if (bs.ends -bs.starts != sizeof(struct sha1_ctx) *8)
		badarg(Context);

	struct sha1_ctx ctx;
	bits_init_buf((uint8_t *)&ctx, sizeof(ctx), &dst);
	bits_copy(&bs, &dst);

	if (!is_boxed_binary(Data) && !is_list(Data))
		badarg(Data);
	int sz = iolist_size(Data);
	if (sz < 0)		// iolist_size() signals an invalid iolist
		badarg(Data);
	assert(sz <= 65536);	//TODO: use heap_tmp_buf for larger Data

	// sz may legitimately be 0 (empty iolist); a zero-length VLA is
	// undefined behavior, so allocate at least one byte
	uint8_t buf[(sz > 0) ?sz :1];
	iolist_flatten(Data, buf);
	sha1_update(&ctx, sz, buf);

	// return the updated context as a fresh binary
	uint8_t *ptr;
	term_t bin = heap_make_bin(&proc->hp, sizeof(ctx), &ptr);
	memcpy(ptr, &ctx, sizeof(ctx));
	return bin;
}
//
// crypto:aes_cbc_crypt/4 — AES-CBC encrypt or decrypt.
//
// regs[0] (Key)  — iolist/binary, AES_MIN_KEY_SIZE..AES_MAX_KEY_SIZE bytes
// regs[1] (IVec) — binary of exactly AES_BLOCK_SIZE bytes
// regs[2] (Data) — iolist/binary to transform
// regs[3] (Dir)  — 'true' to encrypt, 'false' to decrypt
//
// Returns the transformed data as a new binary; raises badarg on
// malformed arguments.
//
// NOTE(review): CBC requires Data length to be a multiple of
// AES_BLOCK_SIZE; this is not validated here — confirm callers
// guarantee it or nettle tolerates the remainder.
//
term_t cbif_aes_cbc_crypt4(proc_t *proc, term_t *regs)
{
	term_t Key = regs[0];
	term_t IVec = regs[1];
	term_t Data = regs[2];
	term_t Dir = regs[3];
	if (!is_list(Key) && !is_boxed_binary(Key))
		badarg(Key);
	if (!is_boxed_binary(IVec))
		badarg(IVec);
	if (!is_list(Data) && !is_boxed_binary(Data))
		badarg(Data);
	if (!is_bool(Dir))
		badarg(Dir);

	// key size check also rejects iolist_size() = -1 (invalid iolist)
	int key_size = iolist_size(Key);
	if (key_size < AES_MIN_KEY_SIZE || key_size > AES_MAX_KEY_SIZE)
		badarg(Key);
	uint8_t key_buf[key_size];
	iolist_flatten(Key, key_buf);

	bits_t src, dst;
	bits_get_real(peel_boxed(IVec), &src);
	if (src.ends -src.starts != AES_BLOCK_SIZE *8)
		badarg(IVec);
	uint8_t ivec_buf[AES_BLOCK_SIZE];
	bits_init_buf(ivec_buf, AES_BLOCK_SIZE, &dst);
	bits_copy(&src, &dst);

	int data_size = iolist_size(Data);
	if (data_size < 0)		// invalid iolist
		badarg(Data);
	assert(data_size <= 65536);	//TODO: use heap_tmp_buf for larger Data

	// data_size may be 0; a zero-length VLA is undefined behavior,
	// so allocate at least one byte
	uint8_t data_buf[(data_size > 0) ?data_size :1];
	iolist_flatten(Data, data_buf);

	struct CBC_CTX(struct aes_ctx, AES_BLOCK_SIZE) ctx;
	if (Dir == A_TRUE)
		aes_set_encrypt_key((struct aes_ctx *)&ctx, key_size, key_buf);
	else
		aes_set_decrypt_key((struct aes_ctx *)&ctx, key_size, key_buf);
	CBC_SET_IV(&ctx, ivec_buf);

	uint8_t *ptr;
	term_t cipher_text = heap_make_bin(&proc->hp, data_size, &ptr);
	if (Dir == A_TRUE)
		CBC_ENCRYPT(&ctx, aes_encrypt, data_size, ptr, data_buf);
	else
		CBC_DECRYPT(&ctx, aes_decrypt, data_size, ptr, data_buf);
	return cipher_text;
}
//
// crypto:sha_final/1 — finish an SHA-1 computation.
//
// regs[0] (Context) must be a binary of exactly sizeof(struct sha1_ctx)
// bytes. Returns the SHA1_DIGEST_SIZE-byte digest as a new binary;
// raises badarg on a malformed context.
//
term_t cbif_sha_final1(proc_t *proc, term_t *regs)
{
	term_t Context = regs[0];
	if (!is_boxed_binary(Context))
		badarg(Context);

	// unpack the opaque context from its carrier binary
	bits_t src, sink;
	bits_get_real(peel_boxed(Context), &src);
	if (src.ends -src.starts != sizeof(struct sha1_ctx) *8)
		badarg(Context);

	struct sha1_ctx ctx;
	bits_init_buf((uint8_t *)&ctx, sizeof(ctx), &sink);
	bits_copy(&src, &sink);

	// emit the digest straight into a freshly allocated binary
	uint8_t *digest;
	term_t bin = heap_make_bin(&proc->hp, SHA1_DIGEST_SIZE, &digest);
	sha1_digest(&ctx, SHA1_DIGEST_SIZE, digest);
	return bin;
}
// // Flatten the valid bits list to the bits_t context // void bits_list_flatten(term_t l, bits_t *bs) { if (is_nil(l)) return; if (is_cons(l)) { do { uint32_t *term_data = peel_cons(l); term_t e = term_data[0]; if (is_int(e)) { int o = int_value(e); assert(o >= 0 && o < 256); bits_put_octet(bs, (uint8_t)o); } else { assert(is_list(e) || (is_boxed(e) && is_binary(peel_boxed(e)))); bits_list_flatten(e, bs); } l = term_data[1]; if (is_boxed(l) && is_binary(peel_boxed(l))) { bits_list_flatten(l, bs); return; } } while (is_cons(l)); assert(is_nil(l)); } else // is_binary() { bits_t source; bits_get_real(peel_boxed(l), &source); bits_copy(&source, bs); } }