/* Construct a pointer-to-(base) type whose pointee carries the union of
 * the qualifiers found on what lhs and rhs point to.
 * Either lhs or rhs may be NULL, in which case it contributes no
 * qualifiers (qual_none). */
static type *pointer_to_qualified(type *base, type *lhs, type *rhs)
{
	enum type_qualifier ql = qual_none;
	enum type_qualifier qr = qual_none;

	if(lhs)
		ql = type_qual(type_next(lhs));
	if(rhs)
		qr = type_qual(type_next(rhs));

	return type_ptr_to(type_qualify(base, ql | qr));
}
/* Return the step size, in bytes, used for arithmetic on a pointer of
 * type t - i.e. sizeof *t.
 * Returns -1 when the pointee is a VLA, whose size is only known at
 * runtime (callers must then use vla_size() instead). */
static int calc_ptr_step(type *t)
{
	type *pointee;

	/* pointer-to-void: step by whatever sizeof(void) is defined as */
	if(type_is_primitive(type_is_ptr(t), type_void))
		return type_primitive_size(type_void);

	/* unknown types step by a single byte */
	if(type_is_primitive(t, type_unknown))
		return 1;

	pointee = type_next(t);

	/* VLA pointee - size not known at compile time */
	if(type_is_vla(pointee, VLA_ANY_DIMENSION))
		return -1;

	return type_size(pointee, NULL);
}
/* Compute the stack space (in bytes) needed to book-keep the VLA decl d:
 * a base allotment depending on the decl's kind, plus one word for each
 * top-dimension VLA found along the decl's type chain (to hold that
 * dimension's runtime size). */
unsigned vla_decl_space(decl *d)
{
	const unsigned pws = platform_word_size();
	unsigned total;
	type *ty;

	if(STORE_IS_TYPEDEF(d->store)){
		/* just the sizes */
		total = 0;
	}else if(type_is_vla(d->ref, VLA_ANY_DIMENSION)){
		/* T *ptr; void *orig_sp; */
		total = pws * 2;
	}else{
		/* T *ptr; - no stack res, no orig_sp */
		total = pws;
	}

	/* one word per top-dimension VLA in the chain */
	for(ty = d->ref; ty; ty = type_next(ty)){
		if(type_is_vla(ty, VLA_TOP_DIMENSION))
			total += pws;
	}

	return total;
}
/* Determine the result type of a conditional expression whose operands
 * may be pointers (C99 6.5.15 p6), storing it in e->tree_type.
 * The cases are tried in order, each guarded by !e->tree_type:
 *   1. both operands are (compatible enough) pointers - merge qualifiers
 *   2. one side is a null pointer constant - take the other's pointee
 *   3. both pointers but incompatible - fall back to void * with a warning
 *   4. nothing matched - hard error, result type defaults to void */
static void try_pointer_propagate(
		expr *e, enum type_cmp cmp,
		type *const tt_l, type *const tt_r)
{
	/* 6.5.15 p6 */
	int l_ptr = !!type_is_ptr_or_block(tt_l);
	int r_ptr = !!type_is_ptr_or_block(tt_r);

	/* if both the second and third operands are pointers */
	if(l_ptr && r_ptr){
		/* comparison results we accept as "same pointee, maybe requalified" */
		int allowed = TYPE_EQUAL_ANY
			| TYPE_QUAL_ADD
			| TYPE_QUAL_SUB
			| TYPE_QUAL_POINTED_ADD
			| TYPE_QUAL_POINTED_SUB;

		if(cmp & allowed){
			/* result: pointer to lhs's pointee, qualified by both sides */
			e->tree_type = pointer_to_qualified(type_next(tt_l), tt_l, tt_r);
		}
	}

	if(!e->tree_type && (l_ptr || r_ptr)){
		/* or one is a null pointer constant and the other is a pointer */
		/* e->lhs is absent for the GNU a ?: b form - fall back to e->expr */
		int l_ptr_null = expr_is_null_ptr(
				e->lhs ? e->lhs : e->expr,
				NULL_STRICT_INT);

		int r_ptr_null = expr_is_null_ptr(e->rhs, NULL_STRICT_INT);

		/* both may still be pointers here */
		if((l_ptr && r_ptr_null) || (r_ptr && l_ptr_null)){
			type *pointed_to;

			if(l_ptr_null != r_ptr_null){
				/* only one is an int - pick the other side */
				pointed_to = type_next(l_ptr_null ? tt_r : tt_l);
			}else{
				/* both are pointers, pick either side */
				pointed_to = type_next(l_ptr ? tt_l : tt_r);
			}

			/* only merge qualifiers from sides that are actual pointers */
			e->tree_type = pointer_to_qualified(
					pointed_to,
					l_ptr ? tt_l : NULL,
					r_ptr ? tt_r : NULL);
		}
	}

	if(!e->tree_type && l_ptr && r_ptr){
		/* incompatible pointers - degrade to qualified void * */
		e->tree_type = pointer_to_qualified(
				type_nav_btype(cc1_type_nav, type_void),
				tt_l, tt_r);

		/* gcc/clang relax the rule here.
		 * 0 ? (A *)0 : (B *)0
		 * becomes a void pointer too */
		if(!type_is_void_ptr(tt_l) && !type_is_void_ptr(tt_r)){
			char buf[TYPE_STATIC_BUFSIZ];

			cc1_warn_at(&e->where,
					mismatch_conditional,
					"conditional type mismatch (%s vs %s)",
					type_to_str(tt_l), type_to_str_r(buf, tt_r));
		}
	}

	if(!e->tree_type){
		/* no case applied - report and recover with void */
		char buf[TYPE_STATIC_BUFSIZ];

		warn_at_print_error(&e->where,
				"conditional type mismatch (%s vs %s)",
				type_to_str(tt_l), type_to_str_r(buf, tt_r));

		fold_had_error = 1;
		e->tree_type = type_nav_btype(cc1_type_nav, type_void);
	}
}
/* Scale the operands of pointer arithmetic:
 *   - ptr +/- int: multiply the integer side by sizeof *ptr, in place
 *   - ptr - ptr:   leave operands alone, emit the divisor (sizeof *ptr)
 *                  through *div_out for the caller to divide by afterwards
 * A calc_ptr_step() result of -1 means the pointee is a VLA, so the
 * scale factor is computed at runtime via vla_size(). */
static void apply_ptr_step(
		out_ctx *octx,
		const out_val **lhs, const out_val **rhs,
		const out_val **div_out)
{
	int l_ptr = !!type_is((*lhs)->t, type_ptr);
	int r_ptr = !!type_is((*rhs)->t, type_ptr);
	int ptr_step;

	/* pure integer arithmetic - nothing to scale */
	if(!l_ptr && !r_ptr)
		return;

	ptr_step = calc_ptr_step((l_ptr ? *lhs : *rhs)->t);

	if(l_ptr ^ r_ptr){
		/* ptr +/- int, adjust the non-ptr by sizeof *ptr */
		const out_val **incdec = (l_ptr ? rhs : lhs);
		out_val *mut_incdec;

		/* get a value we're allowed to mutate */
		*incdec = mut_incdec = v_dup_or_reuse(octx, *incdec, (*incdec)->t);

		switch(mut_incdec->type){
			case V_CONST_I:
				if(ptr_step == -1){
					/* VLA pointee - multiply by the runtime size */
					*incdec = out_op(octx, op_multiply, *incdec,
							vla_size(
								type_next((l_ptr ? *lhs : *rhs)->t),
								octx));

					mut_incdec = NULL; /* safety */
				}else{
					/* constant - fold the scale directly into the value */
					mut_incdec->bits.val_i *= ptr_step;
				}
				break;

			case V_CONST_F:
				assert(0 && "float pointer inc?");
				/* fallthrough */

			case V_LBL:
			case V_FLAG:
			case V_REG_SPILT:
				/* force into a register, then scale like V_REG below */
				assert(mut_incdec->retains == 1);
				*incdec = (out_val *)v_to_reg(octx, *incdec);
				/* fallthrough */

			case V_REG:
			{
				const out_val *n;

				if(ptr_step == -1){
					/* VLA pointee - runtime size */
					n = vla_size(
							type_next((l_ptr ? *lhs : *rhs)->t),
							octx);
				}else{
					n = out_new_l(
							octx,
							type_nav_btype(cc1_type_nav, type_intptr_t),
							ptr_step);
				}

				*incdec = (out_val *)out_op(octx, op_multiply, *incdec, n);
				break;
			}
		}
	}else if(l_ptr && r_ptr){
		/* difference - divide afterwards */
		if(ptr_step == -1){
			/* VLA pointee - runtime divisor */
			*div_out = vla_size(type_next((*lhs)->t), octx);
		}else{
			*div_out = out_new_l(
					octx,
					type_ptr_to(type_nav_btype(cc1_type_nav, type_void)),
					ptr_step);
		}
	}
}
/* Emit the static (assembly-section) initialiser for an object of type
 * tfor from the parsed initialiser `init`, recursing through structs,
 * arrays and unions down to scalars.
 * A NULL / DYNARRAY_NULL init means zero-initialise: emit padding for
 * the full size of the type (flex-arrays get nothing).
 * Struct handling groups adjacent bitfields and flushes each group via
 * bitfields_out(); explicit padding is emitted between members and at
 * the struct tail so the emitted bytes exactly match the layout. */
static void asm_declare_init(enum section_type sec, decl_init *init, type *tfor)
{
	type *r;

	if(init == DYNARRAY_NULL)
		init = NULL;

	if(!init){
		/* don't initialise flex-arrays */
		if(!type_is_incomplete_array(tfor)){
			asm_declare_pad(sec, type_size(tfor, NULL),
					"null init"/*, type_to_str(tfor)*/);
		}else{
			asm_out_section(sec, ASM_COMMENT " flex array init skipped\n");
		}

	}else if((r = type_is_primitive(tfor, type_struct))){
		/* array of stmts for each member
		 * assumes the ->bits.inits order is member order */
		struct_union_enum_st *const sue = r->bits.type->sue;
		sue_member **mem;
		decl_init **i;
		unsigned end_of_last = 0; /* byte offset just past the last emitted member */
		struct bitfield_val *bitfields = NULL; /* current bitfield group, pending flush */
		unsigned nbitfields = 0;
		decl *first_bf = NULL; /* first member of the current bitfield group */
		expr *copy_from_exp;

		UCC_ASSERT(init->type == decl_init_brace, "unbraced struct");

#define DEBUG(s, ...) /*fprintf(f, "\033[35m" s "\033[m\n", __VA_ARGS__)*/

		i = init->bits.ar.inits;

		/* check for compound-literal copy-init */
		if((copy_from_exp = decl_init_is_struct_copy(init, sue))){
			decl_init *copy_from_init;

			copy_from_exp = expr_skip_lval2rval(copy_from_exp);

			/* the only struct-expression that's possible
			 * in static context is a compound literal */
			assert(expr_kind(copy_from_exp, compound_lit)
					&& "unhandled expression init");

			/* substitute the literal's own init list for ours */
			copy_from_init = copy_from_exp->bits.complit.decl->bits.var.init.dinit;
			assert(copy_from_init->type == decl_init_brace);

			i = copy_from_init->bits.ar.inits;
		}

		/* iterate using members, not inits */
		for(mem = sue->members; mem && *mem; mem++){
			decl *d_mem = (*mem)->struct_member;
			decl_init *di_to_use = NULL;

			/* pull the next init entry, if any remain */
			if(i){
				int inc = 1;

				if(*i == NULL)
					inc = 0;
				else if(*i != DYNARRAY_NULL)
					di_to_use = *i;

				if(inc){
					i++;
					if(!*i)
						i = NULL; /* reached end */
				}
			}

			DEBUG("init for %ld/%s, %s",
					mem - sue->members, d_mem->spel,
					di_to_use ? di_to_use->bits.expr->f_str() : NULL);

			/* only pad if we're not on a bitfield or we're on the first bitfield */
			if(!d_mem->bits.var.field_width || !first_bf){
				DEBUG("prev padding, offset=%d, end_of_last=%d",
						d_mem->struct_offset, end_of_last);

				UCC_ASSERT(
						d_mem->bits.var.struct_offset >= end_of_last,
						"negative struct pad, sue %s, member %s "
						"offset %u, end_of_last %u",
						sue->spel, decl_to_str(d_mem),
						d_mem->bits.var.struct_offset, end_of_last);

				asm_declare_pad(sec,
						d_mem->bits.var.struct_offset - end_of_last,
						"prev struct padding");
			}

			if(d_mem->bits.var.field_width){
				/* bitfield member - accumulate into the current group */
				if(!first_bf || d_mem->bits.var.first_bitfield){
					if(first_bf){
						DEBUG("new bitfield group (%s is new boundary), old:",
								d_mem->spel);
						/* next bitfield group - store the current */
						bitfields_out(sec, bitfields, &nbitfields, first_bf->ref);
					}
					first_bf = d_mem;
				}

				bitfields = bitfields_add(
						bitfields, &nbitfields,
						d_mem, di_to_use);

			}else{
				/* ordinary member - flush any pending bitfield group first */
				if(nbitfields){
					DEBUG("at non-bitfield, prev-bitfield out:", 0);
					bitfields_out(sec, bitfields, &nbitfields, first_bf->ref);
					first_bf = NULL;
				}

				DEBUG("normal init for %s:", d_mem->spel);
				asm_declare_init(sec, di_to_use, d_mem->ref);
			}

			/* advance end_of_last - but not for non-leading bitfields,
			 * which share storage with the group's first member */
			if(type_is_incomplete_array(d_mem->ref)){
				UCC_ASSERT(!mem[1], "flex-arr not at end");
			}else if(!d_mem->bits.var.field_width
					|| d_mem->bits.var.first_bitfield)
			{
				unsigned last_sz = type_size(d_mem->ref, NULL);
				end_of_last = d_mem->bits.var.struct_offset + last_sz;
				DEBUG("done with member \"%s\", end_of_last = %d",
						d_mem->spel, end_of_last);
			}
		}

		/* flush a trailing bitfield group */
		if(nbitfields)
			bitfields_out(sec, bitfields, &nbitfields, first_bf->ref);
		free(bitfields);

		/* need to pad to struct size */
		asm_declare_pad(sec,
				sue_size(sue, NULL) - end_of_last,
				"struct tail");

	}else if((r = type_is(tfor, type_array))){
		size_t i, len;
		decl_init **p;
		type *next = type_next(tfor);

		UCC_ASSERT(init->type == decl_init_brace, "unbraced struct");

		if(type_is_incomplete_array(tfor)){
			/* length comes from the initialiser itself */
			len = dynarray_count(init->bits.ar.inits);
		}else{
			UCC_ASSERT(type_is_complete(tfor), "incomplete array/type init");
			len = type_array_len(tfor);
		}

		/* emit len element inits; missing trailing entries recurse as NULL
		 * (zero-init) since p stops advancing at the terminator */
		for(i = len, p = init->bits.ar.inits; i > 0; i--){
			decl_init *this = NULL;

			if(*p){
				this = *p++;

				if(this != DYNARRAY_NULL && this->type == decl_init_copy){
					/*fprintf(f, "# copy from %lu\n", DECL_INIT_COPY_IDX(this, init));*/
					struct init_cpy *icpy = *this->bits.range_copy;
					/* resolve the copy */
					this = icpy->range_init;
				}
			}

			asm_declare_init(sec, this, next);
		}

	}else if((r = type_is_primitive(tfor, type_union))){
		/* union inits are decl_init_brace with spaces up to the first union init,
		 * then NULL/end of the init-array */
		struct_union_enum_st *sue = type_is_s_or_u(r);
		unsigned i, sub = 0; /* sub = bytes emitted by the chosen member */
		decl_init *u_init;

		UCC_ASSERT(init->type == decl_init_brace, "brace init expected");

		/* skip the empties until we get to one */
		for(i = 0; init->bits.ar.inits[i] == DYNARRAY_NULL; i++);

		if((u_init = init->bits.ar.inits[i])){
			decl *mem = sue->members[i]->struct_member;
			type *mem_r = mem->ref;

			/* union init, member at index `i' */
			if(mem->bits.var.field_width){
				/* we know it's integral */
				struct bitfield_val bfv;

				ASSERT_SCALAR(u_init);

				bitfield_val_set(&bfv,
						u_init->bits.expr,
						mem->bits.var.field_width);

				asm_declare_init_bitfields(sec, &bfv, 1, mem_r);
			}else{
				asm_declare_init(sec, u_init, mem_r);
			}

			sub = type_size(mem_r, NULL);
		} /* else null union init */

		/* pad out to the full union size */
		asm_declare_pad(sec,
				type_size(r, NULL) - sub,
				"union extra");

	}else{
		/* scalar */
		expr *exp = init->bits.expr;

		UCC_ASSERT(init->type == decl_init_scalar, "scalar init expected");

		/* exp->tree_type should match tfor */
		{
			char buf[TYPE_STATIC_BUFSIZ];

			UCC_ASSERT(
					type_cmp(exp->tree_type, tfor, TYPE_CMP_ALLOW_TENATIVE_ARRAY)
					!= TYPE_NOT_EQUAL,
					"mismatching init types: %s and %s",
					type_to_str_r(buf, exp->tree_type),
					type_to_str(tfor));
		}

		/* use tfor, since "abc" has type (char[]){(int)'a', (int)'b', ...} */
		DEBUG(" scalar init for %s:", type_to_str(tfor));
		static_val(sec, tfor, exp);
	}
}