Example #1
static void
build_field (segment_info *h, tree union_type, record_layout_info rli)
{
  tree field;
  tree name;
  HOST_WIDE_INT offset = h->offset;
  unsigned HOST_WIDE_INT desired_align, known_align;

  name = get_identifier (h->sym->name);
  field = build_decl (FIELD_DECL, name, h->field);
  gfc_set_decl_location (field, &h->sym->declared_at);
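  /* The lowest set bit of the byte offset gives the largest power-of-two
     alignment (in bits) that the offset itself guarantees.  */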
  known_align = (offset & -offset) * BITS_PER_UNIT;
  if (known_align == 0 || known_align > BIGGEST_ALIGNMENT)
    known_align = BIGGEST_ALIGNMENT;

  desired_align = update_alignment_for_field (rli, field, known_align);
  if (desired_align > known_align)
    DECL_PACKED (field) = 1;

  DECL_FIELD_CONTEXT (field) = union_type;
  DECL_FIELD_OFFSET (field) = size_int (offset);
  DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node;
  SET_DECL_OFFSET_ALIGN (field, known_align);

  rli->offset = size_binop (MAX_EXPR, rli->offset,
                            size_binop (PLUS_EXPR,
                                        DECL_FIELD_OFFSET (field),
                                        DECL_SIZE_UNIT (field)));
  h->field = field;
}
Example #2
static void
build_field (segment_info *h, tree union_type, record_layout_info rli)
{
  tree field;
  tree name;
  HOST_WIDE_INT offset = h->offset;
  unsigned HOST_WIDE_INT desired_align, known_align;

  name = get_identifier (h->sym->name);
  field = build_decl (h->sym->declared_at.lb->location,
		      FIELD_DECL, name, h->field);
  known_align = (offset & -offset) * BITS_PER_UNIT;
  if (known_align == 0 || known_align > BIGGEST_ALIGNMENT)
    known_align = BIGGEST_ALIGNMENT;

  desired_align = update_alignment_for_field (rli, field, known_align);
  if (desired_align > known_align)
    DECL_PACKED (field) = 1;

  DECL_FIELD_CONTEXT (field) = union_type;
  DECL_FIELD_OFFSET (field) = size_int (offset);
  DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node;
  SET_DECL_OFFSET_ALIGN (field, known_align);

  rli->offset = size_binop (MAX_EXPR, rli->offset,
                            size_binop (PLUS_EXPR,
                                        DECL_FIELD_OFFSET (field),
                                        DECL_SIZE_UNIT (field)));
  /* If this field is assigned to a label, we create another two variables.
     One will hold the address of target label or format label. The other will
     hold the length of format label string.  */
  if (h->sym->attr.assign)
    {
      tree len;
      tree addr;

      gfc_allocate_lang_decl (field);
      GFC_DECL_ASSIGN (field) = 1;
      len = gfc_create_var_np (gfc_charlen_type_node, h->sym->name);
      addr = gfc_create_var_np (pvoid_type_node, h->sym->name);
      TREE_STATIC (len) = 1;
      TREE_STATIC (addr) = 1;
      DECL_INITIAL (len) = build_int_cst (NULL_TREE, -2);
      gfc_set_decl_location (len, &h->sym->declared_at);
      gfc_set_decl_location (addr, &h->sym->declared_at);
      GFC_DECL_STRING_LEN (field) = pushdecl_top_level (len);
      GFC_DECL_ASSIGN_ADDR (field) = pushdecl_top_level (addr);
    }

  /* If this field is volatile, mark it.  */
  if (h->sym->attr.volatile_)
    {
      tree new_type;
      TREE_THIS_VOLATILE (field) = 1;
      new_type = build_qualified_type (TREE_TYPE (field), TYPE_QUAL_VOLATILE);
      TREE_TYPE (field) = new_type;
    }

  h->field = field;
}
Example #3
static tree
compute_object_offset (tree expr, tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
			size_int (tree_low_cst (DECL_FIELD_BIT_OFFSET (t), 1)
				  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
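      /* A negative constant index is negated and subtracted, because the
         offset arithmetic below is done in unsigned sizetype.  */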
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
	{
	  code = MINUS_EXPR;
	  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
	}
      t = convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
Example #4
File: expr.c Project: Gd58/gcc
tree
cplus_expand_constant (tree cst)
{
  switch (TREE_CODE (cst))
    {
    case PTRMEM_CST:
      {
	tree type = TREE_TYPE (cst);
	tree member;

	/* Find the member.  */
	member = PTRMEM_CST_MEMBER (cst);

	/* We can't lower this until the class is complete.  */
	if (!COMPLETE_TYPE_P (DECL_CONTEXT (member)))
	  return cst;

	if (TREE_CODE (member) == FIELD_DECL)
	  {
	    /* Find the offset for the field.  */
	    cst = byte_position (member);
	    while (!same_type_p (DECL_CONTEXT (member),
				 TYPE_PTRMEM_CLASS_TYPE (type)))
	      {
		/* The MEMBER must have been nestled within an
		   anonymous aggregate contained in TYPE.  Find the
		   anonymous aggregate.  */
		member = lookup_anon_field (TYPE_PTRMEM_CLASS_TYPE (type),
					    DECL_CONTEXT (member));
		cst = size_binop (PLUS_EXPR, cst, byte_position (member));
	      }
	    cst = fold (build_nop (type, cst));
	  }
	else
	  {
	    tree delta;
	    tree pfn;

	    expand_ptrmemfunc_cst (cst, &delta, &pfn);
	    cst = build_ptrmemfunc1 (type, delta, pfn);
	  }
      }
      break;

    case CONSTRUCTOR:
      {
	constructor_elt *elt;
	unsigned HOST_WIDE_INT idx;
	FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (cst), idx, elt)
	  elt->value = cplus_expand_constant (elt->value);
      }

    default:
      /* There's nothing to do.  */
      break;
    }

  return cst;
}
Example #5
static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  alloc_size = lookup_attribute ("alloc_size",
				 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
    }

  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
	arg2 = 1;
	/* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
	arg1 = 0;
      default:
	break;
      }

  if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
	  && (arg2 >= (int)gimple_call_num_args (call)
	      || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
	fold_convert (sizetype, gimple_call_arg (call, arg1)),
	fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
Example #6
tree
build_array_notation_ref (location_t loc, tree array, tree start, tree length, 
			  tree stride, tree type)
{
  tree array_ntn_expr = NULL_TREE;

  /* If we enter the then-case of the if-statement below, we have hit a case 
     like this: ARRAY [:].  */
  if (!start && !length)
    {
      if (TREE_CODE (type) != ARRAY_TYPE)
	{
	  error_at (loc, "start-index and length fields necessary for "
		    "using array notation in pointers or records");
	  return error_mark_node;
	}
      tree domain = TYPE_DOMAIN (type);
      if (!domain)
	{
	  error_at (loc, "start-index and length fields necessary for "
		    "using array notation with array of unknown bound");
	  return error_mark_node;
	}
      start = cp_fold_convert (ptrdiff_type_node, TYPE_MINVAL (domain));
      length = size_binop (PLUS_EXPR, TYPE_MAXVAL (domain), size_one_node);
      length = cp_fold_convert (ptrdiff_type_node, length);
    }
    
  if (!stride) 
    stride = build_one_cst (ptrdiff_type_node);
  
  /* When dealing with templates, triplet type-checking will be done in pt.c 
     after type substitution.  */
  if (processing_template_decl 
      && (type_dependent_expression_p (array) 
	  || type_dependent_expression_p (length) 
	  || type_dependent_expression_p (start) 
	  || type_dependent_expression_p (stride))) 
    array_ntn_expr = build_min_nt_loc (loc, ARRAY_NOTATION_REF, array, start, 
				       length, stride, NULL_TREE);
  else 
    { 
      if (!cilkplus_an_triplet_types_ok_p (loc, start, length, stride, type))
	return error_mark_node;
      array_ntn_expr = build4 (ARRAY_NOTATION_REF, NULL_TREE, array, start, 
			       length, stride);
    }
  if (TREE_CODE (type) == ARRAY_TYPE || TREE_CODE (type) == POINTER_TYPE)
    TREE_TYPE (array_ntn_expr) = TREE_TYPE (type);
  else
    gcc_unreachable ();

  SET_EXPR_LOCATION (array_ntn_expr, loc);
  return array_ntn_expr;
}
Example #7
static unsigned HOST_WIDE_INT
alloc_object_size (tree call, int object_size_type)
{
  tree callee, arglist, a, bytes = NULL_TREE;
  unsigned int arg_mask = 0;

  gcc_assert (TREE_CODE (call) == CALL_EXPR);

  callee = get_callee_fndecl (call);
  arglist = TREE_OPERAND (call, 1);
  if (callee
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
	arg_mask = 1;
	break;
      /*
      case BUILT_IN_REALLOC:
	arg_mask = 2;
	break;
	*/
      case BUILT_IN_CALLOC:
	arg_mask = 3;
	break;
      default:
	break;
      }

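  /* Bit N of ARG_MASK selects call argument N as contributing to the
     allocation size; multiply the selected constant arguments together.
     A non-constant argument leaves ARG_MASK nonzero and the size unknown.  */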
  for (a = arglist; arg_mask && a; arg_mask >>= 1, a = TREE_CHAIN (a))
    if (arg_mask & 1)
      {
	tree arg = TREE_VALUE (a);

	if (TREE_CODE (arg) != INTEGER_CST)
	  break;

	if (! bytes)
	  bytes = fold_convert (sizetype, arg);
	else
	  bytes = size_binop (MULT_EXPR, bytes,
			      fold_convert (sizetype, arg));
      }

  if (! arg_mask && bytes && host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
Example #8
static void
generate_memset_zero (gimple stmt, tree op0, tree nb_iter,
		      gimple_stmt_iterator bsi)
{
  tree addr_base, nb_bytes;
  bool res = false;
  gimple_seq stmt_list = NULL, stmts;
  gimple fn_call;
  tree mem, fn;
  struct data_reference *dr = XCNEW (struct data_reference);
  location_t loc = gimple_location (stmt);

  DR_STMT (dr) = stmt;
  DR_REF (dr) = op0;
  res = dr_analyze_innermost (dr);
  gcc_assert (res && stride_of_unit_type_p (DR_STEP (dr), TREE_TYPE (op0)));

  nb_bytes = build_size_arg_loc (loc, nb_iter, op0, &stmt_list);
  addr_base = size_binop_loc (loc, PLUS_EXPR, DR_OFFSET (dr), DR_INIT (dr));
  addr_base = fold_convert_loc (loc, sizetype, addr_base);

  /* Test for a negative stride, iterating over every element.  */
  if (integer_zerop (size_binop (PLUS_EXPR,
				 TYPE_SIZE_UNIT (TREE_TYPE (op0)),
				 fold_convert (sizetype, DR_STEP (dr)))))
    {
      addr_base = size_binop_loc (loc, MINUS_EXPR, addr_base,
				  fold_convert_loc (loc, sizetype, nb_bytes));
      addr_base = size_binop_loc (loc, PLUS_EXPR, addr_base,
				  TYPE_SIZE_UNIT (TREE_TYPE (op0)));
    }

  addr_base = fold_build2_loc (loc, POINTER_PLUS_EXPR,
			       TREE_TYPE (DR_BASE_ADDRESS (dr)),
			       DR_BASE_ADDRESS (dr), addr_base);
  mem = force_gimple_operand (addr_base, &stmts, true, NULL);
  gimple_seq_add_seq (&stmt_list, stmts);

  fn = build_fold_addr_expr (implicit_built_in_decls [BUILT_IN_MEMSET]);
  fn_call = gimple_build_call (fn, 3, mem, integer_zero_node, nb_bytes);
  gimple_seq_add_stmt (&stmt_list, fn_call);
  gsi_insert_seq_after (&bsi, stmt_list, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "generated memset zero\n");

  free_data_ref (dr);
}
Example #9
tree
cplus_expand_constant (tree cst)
{
    switch (TREE_CODE (cst))
    {
    case PTRMEM_CST:
    {
        tree type = TREE_TYPE (cst);
        tree member;

        /* Find the member.  */
        member = PTRMEM_CST_MEMBER (cst);

        if (TREE_CODE (member) == FIELD_DECL)
        {
            /* Find the offset for the field.  */
            cst = byte_position (member);
            while (!same_type_p (DECL_CONTEXT (member),
                                 TYPE_PTRMEM_CLASS_TYPE (type)))
            {
                /* The MEMBER must have been nestled within an
                   anonymous aggregate contained in TYPE.  Find the
                   anonymous aggregate.  */
                member = lookup_anon_field (TYPE_PTRMEM_CLASS_TYPE (type),
                                            DECL_CONTEXT (member));
                cst = size_binop (PLUS_EXPR, cst, byte_position (member));
            }
            cst = fold (build_nop (type, cst));
        }
        else
        {
            tree delta;
            tree pfn;

            expand_ptrmemfunc_cst (cst, &delta, &pfn);
            cst = build_ptrmemfunc1 (type, delta, pfn);
        }
    }
    break;

    default:
        /* There's nothing to do.  */
        break;
    }

    return cst;
}
Example #10
tree
make_thunk (tree function, bool this_adjusting,
            tree fixed_offset, tree virtual_offset)
{
    HOST_WIDE_INT d;
    tree thunk;

    gcc_assert (TREE_CODE (function) == FUNCTION_DECL);
    /* We can have this thunks to covariant thunks, but not vice versa.  */
    gcc_assert (!DECL_THIS_THUNK_P (function));
    gcc_assert (!DECL_RESULT_THUNK_P (function) || this_adjusting);

    /* Scale the VIRTUAL_OFFSET to be in terms of bytes.  */
    if (this_adjusting && virtual_offset)
        virtual_offset
            = size_binop (MULT_EXPR,
                          virtual_offset,
                          convert (ssizetype,
                                   TYPE_SIZE_UNIT (vtable_entry_type)));

    d = tree_low_cst (fixed_offset, 0);

    /* See if we already have the thunk in question.  For this_adjusting
       thunks VIRTUAL_OFFSET will be an INTEGER_CST, for covariant thunks it
       will be a BINFO.  */
    for (thunk = DECL_THUNKS (function); thunk; thunk = TREE_CHAIN (thunk))
        if (DECL_THIS_THUNK_P (thunk) == this_adjusting
                && THUNK_FIXED_OFFSET (thunk) == d
                && !virtual_offset == !THUNK_VIRTUAL_OFFSET (thunk)
                && (!virtual_offset
                    || (this_adjusting
                        ? tree_int_cst_equal (THUNK_VIRTUAL_OFFSET (thunk),
                                              virtual_offset)
                        : THUNK_VIRTUAL_OFFSET (thunk) == virtual_offset)))
            return thunk;

    /* All thunks must be created before FUNCTION is actually emitted;
       the ABI requires that all thunks be emitted together with the
       function to which they transfer control.  */
    gcc_assert (!TREE_ASM_WRITTEN (function));
    /* Likewise, we can only be adding thunks to a function declared in
       the class currently being laid out.  */
    gcc_assert (TYPE_SIZE (DECL_CONTEXT (function))
                && TYPE_BEING_DEFINED (DECL_CONTEXT (function)));

    thunk = build_decl (FUNCTION_DECL, NULL_TREE, TREE_TYPE (function));
    DECL_LANG_SPECIFIC (thunk) = DECL_LANG_SPECIFIC (function);
    cxx_dup_lang_specific_decl (thunk);
    DECL_THUNKS (thunk) = NULL_TREE;

    DECL_CONTEXT (thunk) = DECL_CONTEXT (function);
    TREE_READONLY (thunk) = TREE_READONLY (function);
    TREE_THIS_VOLATILE (thunk) = TREE_THIS_VOLATILE (function);
    TREE_PUBLIC (thunk) = TREE_PUBLIC (function);
    SET_DECL_THUNK_P (thunk, this_adjusting);
    THUNK_TARGET (thunk) = function;
    THUNK_FIXED_OFFSET (thunk) = d;
    THUNK_VIRTUAL_OFFSET (thunk) = virtual_offset;
    THUNK_ALIAS (thunk) = NULL_TREE;

    /* The thunk itself is not a constructor or destructor, even if
       the thing it is thunking to is.  */
    DECL_INTERFACE_KNOWN (thunk) = 1;
    DECL_NOT_REALLY_EXTERN (thunk) = 1;
    DECL_SAVED_FUNCTION_DATA (thunk) = NULL;
    DECL_DESTRUCTOR_P (thunk) = 0;
    DECL_CONSTRUCTOR_P (thunk) = 0;
    DECL_EXTERNAL (thunk) = 1;
    DECL_ARTIFICIAL (thunk) = 1;
    /* Even if this thunk is a member of a local class, we don't
       need a static chain.  */
    DECL_NO_STATIC_CHAIN (thunk) = 1;
    /* The THUNK is not a pending inline, even if the FUNCTION is.  */
    DECL_PENDING_INLINE_P (thunk) = 0;
    DECL_INLINE (thunk) = 0;
    DECL_DECLARED_INLINE_P (thunk) = 0;
    /* Nor has it been deferred.  */
    DECL_DEFERRED_FN (thunk) = 0;
    /* Nor is it a template instantiation.  */
    DECL_USE_TEMPLATE (thunk) = 0;
    DECL_TEMPLATE_INFO (thunk) = NULL;

    /* Add it to the list of thunks associated with FUNCTION.  */
    TREE_CHAIN (thunk) = DECL_THUNKS (function);
    DECL_THUNKS (function) = thunk;

    return thunk;
}
Example #11
static unsigned HOST_WIDE_INT
addr_object_size (tree ptr, int object_size_type)
{
  tree pt_var;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  pt_var = TREE_OPERAND (ptr, 0);
  if (REFERENCE_CLASS_P (pt_var))
    pt_var = get_base_address (pt_var);

  if (pt_var
      && (SSA_VAR_P (pt_var) || TREE_CODE (pt_var) == STRING_CST)
      && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
      && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
      && (unsigned HOST_WIDE_INT)
	 tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1) < offset_limit)
    {
      tree bytes;

      if (pt_var != TREE_OPERAND (ptr, 0))
	{
	  tree var;

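          /* For __builtin_object_size types 1 and 3 the size is limited to
             the enclosing subobject, so use the innermost referenced
             subobject rather than the whole variable when possible.  */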
	  if (object_size_type & 1)
	    {
	      var = TREE_OPERAND (ptr, 0);

	      while (var != pt_var
		      && TREE_CODE (var) != BIT_FIELD_REF
		      && TREE_CODE (var) != COMPONENT_REF
		      && TREE_CODE (var) != ARRAY_REF
		      && TREE_CODE (var) != ARRAY_RANGE_REF
		      && TREE_CODE (var) != REALPART_EXPR
		      && TREE_CODE (var) != IMAGPART_EXPR)
		var = TREE_OPERAND (var, 0);
	      if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
		var = TREE_OPERAND (var, 0);
	      if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
		  || ! host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (var)), 1)
		  || tree_int_cst_lt (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)),
				      TYPE_SIZE_UNIT (TREE_TYPE (var))))
		var = pt_var;
	    }
	  else
	    var = pt_var;

	  bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
	  if (bytes != error_mark_node)
	    {
	      if (TREE_CODE (bytes) == INTEGER_CST
		  && tree_int_cst_lt (TYPE_SIZE_UNIT (TREE_TYPE (var)), bytes))
		bytes = size_zero_node;
	      else
		bytes = size_binop (MINUS_EXPR,
				    TYPE_SIZE_UNIT (TREE_TYPE (var)), bytes);
	    }
	}
      else
	bytes = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));

      if (host_integerp (bytes, 1))
	return tree_low_cst (bytes, 1);
    }

  return unknown[object_size_type];
}
Example #12
/* Find memcpy, mempcpy, memmove and memset calls, perform
   checks before call and then call no_chk version of
   functions.  We do it on O2 to enable inlining of these
   functions during expand.

   Also try to find memcpy, mempcpy, memmove and memset calls
   which are known to not write pointers to memory and use
   faster function versions for them.  */
static void
chkp_optimize_string_function_calls (void)
{
    basic_block bb;

    if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Searching for replaceable string function calls...\n");

    FOR_EACH_BB_FN (bb, cfun)
    {
        gimple_stmt_iterator i;

        for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
            gimple *stmt = gsi_stmt (i);
            tree fndecl;

            if (gimple_code (stmt) != GIMPLE_CALL
                    || !gimple_call_with_bounds_p (stmt))
                continue;

            fndecl = gimple_call_fndecl (stmt);

            if (!fndecl || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
                continue;

            if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMCPY_CHKP
                    || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHKP
                    || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMMOVE_CHKP
                    || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHKP)
            {
                tree dst = gimple_call_arg (stmt, 0);
                tree dst_bnd = gimple_call_arg (stmt, 1);
                bool is_memset = DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHKP;
                tree size = gimple_call_arg (stmt, is_memset ? 3 : 4);
                tree fndecl_nochk;
                gimple_stmt_iterator j;
                basic_block check_bb;
                address_t size_val;
                int sign;
                bool known;

                /* We may replace call with corresponding __chkp_*_nobnd
                call in case destination pointer base type is not
                 void or pointer.  */
                if (POINTER_TYPE_P (TREE_TYPE (dst))
                        && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (dst)))
                        && !chkp_type_has_pointer (TREE_TYPE (TREE_TYPE (dst))))
                {
                    tree fndecl_nobnd
                        = chkp_get_nobnd_fndecl (DECL_FUNCTION_CODE (fndecl));

                    if (fndecl_nobnd)
                        fndecl = fndecl_nobnd;
                }

                fndecl_nochk = chkp_get_nochk_fndecl (DECL_FUNCTION_CODE (fndecl));

                if (fndecl_nochk)
                    fndecl = fndecl_nochk;

                if (fndecl != gimple_call_fndecl (stmt))
                {
                    if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                        fprintf (dump_file, "Replacing call: ");
                        print_gimple_stmt (dump_file, stmt, 0,
                                           TDF_VOPS|TDF_MEMSYMS);
                    }

                    gimple_call_set_fndecl (stmt, fndecl);

                    if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                        fprintf (dump_file, "With a new call: ");
                        print_gimple_stmt (dump_file, stmt, 0,
                                           TDF_VOPS|TDF_MEMSYMS);
                    }
                }

                /* If there is no nochk version of function then
                do nothing.  Otherwise insert checks before
                 the call.  */
                if (!fndecl_nochk)
                    continue;

                /* If size passed to call is known and > 0
                then we may insert checks unconditionally.  */
                size_val.pol.create (0);
                chkp_collect_value (size, size_val);
                known = chkp_is_constant_addr (size_val, &sign);
                size_val.pol.release ();

                /* If we are not sure size is not zero then we have
                to perform runtime check for size and perform
                 checks only when size is not zero.  */
                if (!known)
                {
                    gimple *check = gimple_build_cond (NE_EXPR,
                                                       size,
                                                       size_zero_node,
                                                       NULL_TREE,
                                                       NULL_TREE);

                    /* Split block before string function call.  */
                    gsi_prev (&i);
                    check_bb = insert_cond_bb (bb, gsi_stmt (i), check);

                    /* Set position for checks.  */
                    j = gsi_last_bb (check_bb);

                    /* The block was split and therefore we
                       need to set iterator to its end.  */
                    i = gsi_last_bb (bb);
                }
                /* If size is known to be zero then no checks
                should be performed.  */
                else if (!sign)
                    continue;
                else
                    j = i;

                size = size_binop (MINUS_EXPR, size, size_one_node);
                if (!is_memset)
                {
                    tree src = gimple_call_arg (stmt, 2);
                    tree src_bnd = gimple_call_arg (stmt, 3);

                    chkp_check_mem_access (src, fold_build_pointer_plus (src, size),
                                           src_bnd, j, gimple_location (stmt),
                                           integer_zero_node);
                }

                chkp_check_mem_access (dst, fold_build_pointer_plus (dst, size),
                                       dst_bnd, j, gimple_location (stmt),
                                       integer_one_node);

            }
        }
    }
}
Example #13
static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
		  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
	  || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
	{
	  sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
					    object_size_type & ~1);
	}
      else
	{
	  tree var = TREE_OPERAND (pt_var, 0);
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, var);
	  if (bitmap_bit_p (computed[object_size_type],
			    SSA_NAME_VERSION (var)))
	    sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
	  else
	    sz = unknown[object_size_type];
	}
      if (sz != unknown[object_size_type])
	{
	  double_int dsz = double_int::from_uhwi (sz) - mem_ref_offset (pt_var);
	  if (dsz.is_negative ())
	    sz = 0;
	  else if (dsz.fits_uhwi ())
	    sz = dsz.to_uhwi ();
	  else
	    sz = unknown[object_size_type];
	}

      if (sz != unknown[object_size_type] && sz < offset_limit)
	pt_var_size = size_int (sz);
    }
  else if (pt_var
	   && DECL_P (pt_var)
	   && host_integerp (DECL_SIZE_UNIT (pt_var), 1)
	   && (unsigned HOST_WIDE_INT)
	        tree_low_cst (DECL_SIZE_UNIT (pt_var), 1) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
	   && TREE_CODE (pt_var) == STRING_CST
	   && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
	   && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
	   && (unsigned HOST_WIDE_INT)
	      tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
	      < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
	{
	  var = TREE_OPERAND (ptr, 0);

	  while (var != pt_var
		 && TREE_CODE (var) != BIT_FIELD_REF
		 && TREE_CODE (var) != COMPONENT_REF
		 && TREE_CODE (var) != ARRAY_REF
		 && TREE_CODE (var) != ARRAY_RANGE_REF
		 && TREE_CODE (var) != REALPART_EXPR
		 && TREE_CODE (var) != IMAGPART_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
	    var = TREE_OPERAND (var, 0);
	  if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
	      || ! host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (var)), 1)
	      || (pt_var_size
		  && tree_int_cst_lt (pt_var_size,
				      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
	    var = pt_var;
	  else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
	    {
	      tree v = var;
	      /* For &X->fld, compute object size only if fld isn't the last
		 field, as struct { int i; char c[1]; } is often used instead
		 of flexible array member.  */
	      while (v && v != pt_var)
		switch (TREE_CODE (v))
		  {
		  case ARRAY_REF:
		    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
			&& TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
		      {
			tree domain
			  = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
			if (domain
			    && TYPE_MAX_VALUE (domain)
			    && TREE_CODE (TYPE_MAX_VALUE (domain))
			       == INTEGER_CST
			    && tree_int_cst_lt (TREE_OPERAND (v, 1),
						TYPE_MAX_VALUE (domain)))
			  {
			    v = NULL_TREE;
			    break;
			  }
		      }
		    v = TREE_OPERAND (v, 0);
		    break;
		  case REALPART_EXPR:
		  case IMAGPART_EXPR:
		    v = NULL_TREE;
		    break;
		  case COMPONENT_REF:
		    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
		      {
			v = NULL_TREE;
			break;
		      }
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (TREE_CODE (v) == COMPONENT_REF
			&& TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			   == RECORD_TYPE)
		      {
			tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
			for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
			  if (TREE_CODE (fld_chain) == FIELD_DECL)
			    break;

			if (fld_chain)
			  {
			    v = NULL_TREE;
			    break;
			  }
			v = TREE_OPERAND (v, 0);
		      }
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (v != pt_var)
		      v = NULL_TREE;
		    else
		      v = pt_var;
		    break;
		  default:
		    v = pt_var;
		    break;
		  }
	      if (v == pt_var)
		var = pt_var;
	    }
	}
      else
	var = pt_var;

      if (var != pt_var)
	var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
	return unknown[object_size_type];
      else
	var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
	{
	  if (TREE_CODE (bytes) == INTEGER_CST
	      && tree_int_cst_lt (var_size, bytes))
	    bytes = size_zero_node;
	  else
	    bytes = size_binop (MINUS_EXPR, var_size, bytes);
	}
      if (var != pt_var
	  && pt_var_size
	  && TREE_CODE (pt_var) == MEM_REF
	  && bytes != error_mark_node)
	{
	  tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
	  if (bytes2 != error_mark_node)
	    {
	      if (TREE_CODE (bytes2) == INTEGER_CST
		  && tree_int_cst_lt (pt_var_size, bytes2))
		bytes2 = size_zero_node;
	      else
		bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
	      bytes = size_binop (MIN_EXPR, bytes, bytes2);
	    }
	}
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
Example #14
static tree
tinfo_base_init (tree desc, tree target)
{
  tree init = NULL_TREE;
  tree name_decl;
  tree vtable_ptr;
  
  {
    tree name_name;
    
    /* Generate the NTBS array variable.  */
    tree name_type = build_cplus_array_type
                     (build_qualified_type (char_type_node, TYPE_QUAL_CONST),
                     NULL_TREE);
    tree name_string = tinfo_name (target);

    /* Determine the name of the variable -- and remember with which
       type it is associated.  */
    name_name = mangle_typeinfo_string_for_type (target);
    TREE_TYPE (name_name) = target;

    name_decl = build_lang_decl (VAR_DECL, name_name, name_type);
    SET_DECL_ASSEMBLER_NAME (name_decl, name_name);
    DECL_ARTIFICIAL (name_decl) = 1;
    DECL_IGNORED_P (name_decl) = 1;
    TREE_READONLY (name_decl) = 1;
    TREE_STATIC (name_decl) = 1;
    DECL_EXTERNAL (name_decl) = 0;
    DECL_TINFO_P (name_decl) = 1;
    if (involves_incomplete_p (target))
      {
	TREE_PUBLIC (name_decl) = 0;
	DECL_INTERFACE_KNOWN (name_decl) = 1;
      }
    else
      set_linkage_according_to_type (target, name_decl);
    import_export_decl (name_decl);
    DECL_INITIAL (name_decl) = name_string;
    mark_used (name_decl);
    pushdecl_top_level_and_finish (name_decl, name_string);
  }

  vtable_ptr = TINFO_VTABLE_DECL (desc);
  if (!vtable_ptr)
    {
      tree real_type;
  
      push_nested_namespace (abi_node);
      real_type = xref_tag (class_type, TINFO_REAL_NAME (desc),
			    /* APPLE LOCAL 4184203 */
			    /*tag_scope=*/ts_global, false);
      pop_nested_namespace (abi_node);
  
      if (!COMPLETE_TYPE_P (real_type))
	{
          /* We never saw a definition of this type, so we need to
	     tell the compiler that this is an exported class, as
	     indeed all of the __*_type_info classes are.  */
	  SET_CLASSTYPE_INTERFACE_KNOWN (real_type);
	  CLASSTYPE_INTERFACE_ONLY (real_type) = 1;
	}

      vtable_ptr = get_vtable_decl (real_type, /*complete=*/1);
      vtable_ptr = build_unary_op (ADDR_EXPR, vtable_ptr, 0);

      /* We need to point into the middle of the vtable.  */
      vtable_ptr = build2
	(PLUS_EXPR, TREE_TYPE (vtable_ptr), vtable_ptr,
	 size_binop (MULT_EXPR,
		     size_int (2 * TARGET_VTABLE_DATA_ENTRY_DISTANCE),
		     TYPE_SIZE_UNIT (vtable_entry_type)));

      TINFO_VTABLE_DECL (desc) = vtable_ptr;
    }

  init = tree_cons (NULL_TREE, vtable_ptr, init);
  
  init = tree_cons (NULL_TREE, decay_conversion (name_decl), init);
  
  init = build_constructor (NULL_TREE, nreverse (init));
  TREE_CONSTANT (init) = 1;
  TREE_INVARIANT (init) = 1;
  TREE_STATIC (init) = 1;
  init = tree_cons (NULL_TREE, init, NULL_TREE);
  
  return init;
}
Example #15
File: cvt.c Project: Fokycnuk/gcc
static tree
cp_convert_to_pointer (tree type, tree expr, bool force)
{
  tree intype = TREE_TYPE (expr);
  enum tree_code form;
  tree rval;
  if (intype == error_mark_node)
    return error_mark_node;

  if (IS_AGGR_TYPE (intype))
    {
      intype = complete_type (intype);
      if (!COMPLETE_TYPE_P (intype))
	{
	  error ("can't convert from incomplete type `%T' to `%T'",
		    intype, type);
	  return error_mark_node;
	}

      rval = build_type_conversion (type, expr);
      if (rval)
	{
	  if (rval == error_mark_node)
	    error ("conversion of `%E' from `%T' to `%T' is ambiguous",
		      expr, intype, type);
	  return rval;
	}
    }

  /* Handle anachronistic conversions from (::*)() to cv void* or (*)().  */
  if (TREE_CODE (type) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE
	  || VOID_TYPE_P (TREE_TYPE (type))))
    {
      /* Allow an implicit this pointer for pointer to member
	 functions.  */
      if (TYPE_PTRMEMFUNC_P (intype))
	{
	  if (pedantic || warn_pmf2ptr)
	    pedwarn ("converting from `%T' to `%T'", intype, type);
	  if (TREE_CODE (expr) == PTRMEM_CST)
	    expr = build_address (PTRMEM_CST_MEMBER (expr));
	  else
	    {
	      tree decl = maybe_dummy_object (TYPE_PTRMEM_CLASS_TYPE (intype), 
					      0);
	      decl = build_address (decl);
	      expr = get_member_function_from_ptrfunc (&decl, expr);
	    }
	}
      else if (TREE_CODE (TREE_TYPE (expr)) == METHOD_TYPE)
	{
	  if (pedantic || warn_pmf2ptr)
	    pedwarn ("converting from `%T' to `%T'", intype, type);
	  expr = build_addr_func (expr);
	}
      if (TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
	return build_nop (type, expr);
      intype = TREE_TYPE (expr);
    }

  if (expr == error_mark_node)
    return error_mark_node;

  form = TREE_CODE (intype);

  if (POINTER_TYPE_P (intype))
    {
      intype = TYPE_MAIN_VARIANT (intype);

      if (TYPE_MAIN_VARIANT (type) != intype
	  && TREE_CODE (type) == POINTER_TYPE
	  && TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
	  && IS_AGGR_TYPE (TREE_TYPE (type))
	  && IS_AGGR_TYPE (TREE_TYPE (intype))
	  && TREE_CODE (TREE_TYPE (intype)) == RECORD_TYPE)
	{
	  enum tree_code code = PLUS_EXPR;
	  tree binfo;
	  tree intype_class;
	  tree type_class;
	  bool same_p;

	  intype_class = TREE_TYPE (intype);
	  type_class = TREE_TYPE (type);

	  same_p = same_type_p (TYPE_MAIN_VARIANT (intype_class), 
				TYPE_MAIN_VARIANT (type_class));
	  binfo = NULL_TREE;
	  /* Try derived to base conversion.  */
	  if (!same_p)
	    binfo = lookup_base (intype_class, type_class, ba_check, NULL);
	  if (!same_p && !binfo)
	    {
	      /* Try base to derived conversion.  */
	      binfo = lookup_base (type_class, intype_class, ba_check, NULL);
	      code = MINUS_EXPR;
	    }
	  if (binfo == error_mark_node)
	    return error_mark_node;
	  if (binfo || same_p)
	    {
	      if (binfo)
		expr = build_base_path (code, expr, binfo, 0);
	      /* Add any qualifier conversions.  */
	      return build_nop (type, expr);
	    }
	}

      if (TYPE_PTRMEMFUNC_P (type))
	{
	  error ("cannot convert `%E' from type `%T' to type `%T'",
		    expr, intype, type);
	  return error_mark_node;
	}

      return build_nop (type, expr);
    }
  else if (TYPE_PTRMEM_P (type) && TYPE_PTRMEM_P (intype))
    {
      tree b1; 
      tree b2;
      tree binfo;
      enum tree_code code = PLUS_EXPR;
      base_kind bk;

      b1 = TYPE_PTRMEM_CLASS_TYPE (type);
      b2 = TYPE_PTRMEM_CLASS_TYPE (intype);
      binfo = lookup_base (b1, b2, ba_check, &bk);
      if (!binfo)
	{
	  binfo = lookup_base (b2, b1, ba_check, &bk);
	  code = MINUS_EXPR;
	}
      if (binfo == error_mark_node)
	return error_mark_node;

      if (bk == bk_via_virtual)
	{
	  if (force)
	    warning ("pointer to member cast from `%T' to `%T' is via virtual base",
		     intype, type);
	  else
	    {
	      error ("pointer to member cast from `%T' to `%T' is via virtual base",
		     intype, type);
	      return error_mark_node;
	    }
	  /* This is a reinterpret cast, whose result is unspecified.
	     We choose to do nothing.  */
	  return build1 (NOP_EXPR, type, expr);
	}

      if (TREE_CODE (expr) == PTRMEM_CST)
	expr = cplus_expand_constant (expr);

      if (binfo && !integer_zerop (BINFO_OFFSET (binfo)))
	expr = size_binop (code, 
			   build_nop (sizetype, expr),
			   BINFO_OFFSET (binfo));
      return build_nop (type, expr);
    }
  else if (TYPE_PTRMEMFUNC_P (type) && TYPE_PTRMEMFUNC_P (intype))
    return build_ptrmemfunc (TYPE_PTRMEMFUNC_FN_TYPE (type), expr, 0);
  else if (TYPE_PTRMEMFUNC_P (intype))
    {
      if (!warn_pmf2ptr)
	{
	  if (TREE_CODE (expr) == PTRMEM_CST)
	    return cp_convert_to_pointer (type,
					  PTRMEM_CST_MEMBER (expr),
					  force);
	  else if (TREE_CODE (expr) == OFFSET_REF)
	    {
	      tree object = TREE_OPERAND (expr, 0);
	      return get_member_function_from_ptrfunc (&object,
						       TREE_OPERAND (expr, 1));
	    }
	}
      error ("cannot convert `%E' from type `%T' to type `%T'",
		expr, intype, type);
      return error_mark_node;
    }

  if (integer_zerop (expr))
    {
      if (TYPE_PTRMEMFUNC_P (type))
	return build_ptrmemfunc (TYPE_PTRMEMFUNC_FN_TYPE (type), expr, 0);

      if (TYPE_PTRMEM_P (type))
	/* A NULL pointer-to-member is represented by -1, not by
	   zero.  */
	expr = build_int_2 (-1, -1);
      else
	expr = build_int_2 (0, 0);
      TREE_TYPE (expr) = type;
      /* Fix up the representation of -1 if appropriate.  */
      force_fit_type (expr, 0);
      return expr;
    }
  else if (TYPE_PTR_TO_MEMBER_P (type) && INTEGRAL_CODE_P (form))
    {
      error ("invalid conversion from '%T' to '%T'", intype, type);
      return error_mark_node;
    }

  if (INTEGRAL_CODE_P (form))
    {
      if (TYPE_PRECISION (intype) == POINTER_SIZE)
	return build1 (CONVERT_EXPR, type, expr);
      expr = cp_convert (c_common_type_for_size (POINTER_SIZE, 0), expr);
      /* Modes may be different but sizes should be the same.  */
      if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (expr)))
	  != GET_MODE_SIZE (TYPE_MODE (type)))
	/* There is supposed to be some integral type
	   that is the same width as a pointer.  */
	abort ();
      return convert_to_pointer (type, expr);
    }

  if (type_unknown_p (expr))
    return instantiate_type (type, expr, tf_error | tf_warning);

  error ("cannot convert `%E' from type `%T' to type `%T'",
	    expr, intype, type);
  return error_mark_node;
}
Example #16
static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
			size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
				  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      tree low_bound, unit_size;
      low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
      unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
      if (! integer_zerop (low_bound))
	t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
	{
	  code = MINUS_EXPR;
	  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
	}
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, unit_size, t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
Example #17
static void
mf_xform_derefs_1 (block_stmt_iterator *iter, tree *tp,
                   location_t *locus, tree dirflag)
{
  tree type, base, limit, addr, size, t;

  /* Don't instrument read operations.  */
  if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
    return;

  /* Don't instrument marked nodes.  */
  if (mf_marked_p (*tp))
    return;

  t = *tp;
  type = TREE_TYPE (t);

  if (type == error_mark_node)
    return;

  size = TYPE_SIZE_UNIT (type);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
      {
        /* This is trickier than it may first appear.  The reason is
           that we are looking at expressions from the "inside out" at
           this point.  We may have a complex nested aggregate/array
           expression (e.g. "a.b[i].c"), maybe with an indirection as
           the leftmost operator ("p->a.b.d"), where instrumentation
           is necessary.  Or we may have an innocent "a.b.c"
           expression that must not be instrumented.  We need to
           recurse all the way down the nesting structure to figure it
           out: looking just at the outer node is not enough.  */          
        tree var;
        int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
	/* If we have a bitfield component reference, we must note the
	   innermost addressable object in ELT, from which we will
	   construct the byte-addressable bounds of the bitfield.  */
	tree elt = NULL_TREE;
	int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
			      && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));

        /* Iterate to the top of the ARRAY_REF/COMPONENT_REF
           containment hierarchy to find the outermost VAR_DECL.  */
        var = TREE_OPERAND (t, 0);
        while (1)
          {
	    if (bitfield_ref_p && elt == NULL_TREE
		&& (TREE_CODE (var) == ARRAY_REF || TREE_CODE (var) == COMPONENT_REF))
	      elt = var;
	
            if (TREE_CODE (var) == ARRAY_REF)
              {
                component_ref_only = 0;
                var = TREE_OPERAND (var, 0);
              }
            else if (TREE_CODE (var) == COMPONENT_REF)
              var = TREE_OPERAND (var, 0);
            else if (INDIRECT_REF_P (var))
              {
		base = TREE_OPERAND (var, 0);
                break;
              }
            else 
              {
                gcc_assert (TREE_CODE (var) == VAR_DECL 
                            || TREE_CODE (var) == PARM_DECL
                            || TREE_CODE (var) == RESULT_DECL
                            || TREE_CODE (var) == STRING_CST);
                /* Don't instrument this access if the underlying
                   variable is not "eligible".  This test matches
                   those arrays that have only known-valid indexes,
                   and thus are not labeled TREE_ADDRESSABLE.  */
                if (! mf_decl_eligible_p (var) || component_ref_only)
                  return;
                else
		  {
		    base = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (var)), var);
		    break;
		  }
              }
          }

        /* Handle the case of ordinary non-indirection structure
           accesses.  These have only nested COMPONENT_REF nodes (no
           INDIRECT_REF), but pass through the above filter loop.
           Note that it's possible for such a struct variable to match
           the eligible_p test because someone else might take its
           address sometime.  */

        /* We need special processing for bitfield components, because
           their addresses cannot be taken.  */
        if (bitfield_ref_p)
          {
            tree field = TREE_OPERAND (t, 1);

            if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
              size = DECL_SIZE_UNIT (field);
            
	    if (elt)
	      elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)), elt);
            addr = fold_convert (ptr_type_node, elt ? elt : base);
            addr = fold_build2 (PLUS_EXPR, ptr_type_node,
				addr, fold_convert (ptr_type_node,
						    byte_position (field)));
          }
        else
          addr = build1 (ADDR_EXPR, build_pointer_type (type), t);

        limit = fold_build2 (MINUS_EXPR, mf_uintptr_type,
                             fold_build2 (PLUS_EXPR, mf_uintptr_type,
					  convert (mf_uintptr_type, addr),
					  size),
                             integer_one_node);
      }
      break;

    case INDIRECT_REF:
      addr = TREE_OPERAND (t, 0);
      base = addr;
      limit = fold_build2 (MINUS_EXPR, ptr_type_node,
                           fold_build2 (PLUS_EXPR, ptr_type_node, base, size),
                           integer_one_node);
      break;

    case TARGET_MEM_REF:
      addr = tree_mem_ref_addr (ptr_type_node, t);
      base = addr;
      limit = fold_build2 (MINUS_EXPR, ptr_type_node,
			   fold_build2 (PLUS_EXPR, ptr_type_node, base, size),
			   build_int_cst (ptr_type_node, 1));
      break;

    case ARRAY_RANGE_REF:
      warning (0, "mudflap checking not yet implemented for ARRAY_RANGE_REF");
      return;

    case BIT_FIELD_REF:
      /* ??? merge with COMPONENT_REF code above? */
      {
        tree ofs, rem, bpu;

        /* If we're not dereferencing something, then the access
           must be ok.  */
        if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF)
          return;

        bpu = bitsize_int (BITS_PER_UNIT);
        ofs = convert (bitsizetype, TREE_OPERAND (t, 2));
        rem = size_binop (TRUNC_MOD_EXPR, ofs, bpu);
        ofs = size_binop (TRUNC_DIV_EXPR, ofs, bpu);

        size = convert (bitsizetype, TREE_OPERAND (t, 1));
        size = size_binop (PLUS_EXPR, size, rem);
        size = size_binop (CEIL_DIV_EXPR, size, bpu);
        size = convert (sizetype, size);

        addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
        addr = convert (ptr_type_node, addr);
        addr = fold_build2 (PLUS_EXPR, ptr_type_node, addr, ofs);

        base = addr;
        limit = fold_build2 (MINUS_EXPR, ptr_type_node,
                             fold_build2 (PLUS_EXPR, ptr_type_node, base, size),
                             integer_one_node);
      }
      break;

    default:
      return;
    }

  mf_build_check_statement_for (base, limit, iter, locus, dirflag);
}
Example #18
static void
mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp,
		location_t location, tree dirflag)
{
	tree type, base=NULL_TREE, limit, addr, size, t, elt=NULL_TREE;
	tree temp, field, offset;
	bool check_red_flag = 0, instrumented = 0;
	tree fncall_param_val;
	gimple is_char_red_call;
	tree temp_instr, type_node;

    // TODO fix this to use our flag
	/* Don't instrument read operations.  */
	if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
		return;

	DEBUGLOG("TREE_CODE(t) = %s, mf_decl_eligible_p : %d\n", 
			tree_code_name[(int)TREE_CODE(*tp)], mf_decl_eligible_p(*tp));

	t = *tp;
	type = TREE_TYPE (t);

	if (type == error_mark_node)
		return;

	size = TYPE_SIZE_UNIT (type);

	/* Don't instrument marked nodes.  */
	if (mf_marked_p (t) && !mf_decl_eligible_p(t)){
		DEBUGLOG("Returning Here - 1\n");
		return;
	}

    if (TREE_CODE(t) == ADDR_EXPR || \
            TREE_CODE(t) == COMPONENT_REF || \
            TREE_CODE(t) == ARRAY_REF || \
            (TREE_CODE(t) == VAR_DECL && mf_decl_eligible_p(t)))
    {
        DEBUGLOG("------ INSTRUMENTING NODES ---------\n");
        temp = TREE_OPERAND(t, 0);

        if(temp && (TREE_CODE(temp) == STRING_CST || \
                TREE_CODE(temp) == FUNCTION_DECL)) // TODO Check this out? What do you do in this case?
            return;

        DEBUGLOG("TREE_CODE(temp) : %s\n", tree_code_name[(int)TREE_CODE(temp)]);

        if (TREE_CODE(t) == VAR_DECL)
            *tp = mf_walk_n_instrument(tp, &instrumented);
        else
            TREE_OPERAND(t,0) = mf_walk_n_instrument(&(TREE_OPERAND(t,0)), &instrumented);

        if (TREE_CODE(t) == ADDR_EXPR)
            return;
    } 

    DEBUGLOG("Pass2 derefs: entering deref section\n");

    type_node = NULL_TREE;
    //TODO move this to appropriate cases
    t = *tp;
	switch (TREE_CODE (t))
	{
		case ARRAY_REF:
		case COMPONENT_REF: // TODO check if following works for comp refs
			{ 
                DEBUGLOG("------ INSIDE CASE COMPONENT_REF  ---------\n");
                HOST_WIDE_INT bitsize, bitpos;
                tree inner, offset;
                int volatilep, unsignedp;
                enum machine_mode mode1;
                check_red_flag = 1; 
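                /* Decompose the reference into its innermost object plus a
                   bit position and offset, then rebuild the byte address as
                   &inner + offset for the check emitted below.  */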
                inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
                        &mode1, &unsignedp, &volatilep, false);
                if (!offset)
                    offset = size_zero_node;
                offset = size_binop (PLUS_EXPR, offset,
                        size_int (bitpos / BITS_PER_UNIT));
                addr = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
                        build1 (ADDR_EXPR, build_pointer_type(type), inner), offset);
                break; // TODO continue?
            }

		case INDIRECT_REF:
			DEBUGLOG("------ INSIDE CASE INDIRECT_REF  ---------\n");
			check_red_flag = 1;
			addr = TREE_OPERAND (t, 0);
            break; // TODO continue?

		case MEM_REF:
			DEBUGLOG("------ INSIDE CASE MEM_REF  ---------\n");
			check_red_flag = 1;
			addr = fold_build2_loc (location, POINTER_PLUS_EXPR, TREE_TYPE (TREE_OPERAND (t, 0)),
					TREE_OPERAND (t, 0), fold_convert (sizetype, TREE_OPERAND (t, 1)));
            break;

		case TARGET_MEM_REF:
			DEBUGLOG("------ INSIDE CASE TARGET_MEM_REF  ---------\n");
			check_red_flag = 1;
			addr = tree_mem_ref_addr (ptr_type_node, t);
			break; // TODO do you want to do this case? find out what it does.

		case ARRAY_RANGE_REF:
			DEBUGLOG("------ INSIDE CASE ARRAY_RANGE_REF  ---------\n");
			DEBUGLOG("------ TODO not handled yet---------\n");
			return;

		case BIT_FIELD_REF:
			DEBUGLOG("------ INSIDE CASE BIT_FIELD_REF  ---------\n");
			DEBUGLOG("------ TODO not handled yet---------\n");
			return;

		default:
			DEBUGLOG("------ INSIDE CASE DEFAULT  ---------\n");
			if(mf_decl_eligible_p(t))
			{
                DEBUGLOG("Do you want to be here?\n");
                return;
				/*if((*tp = mx_xform_instrument_pass2(t)) == NULL_TREE){
					DEBUGLOG("Failed to set tree operand\n");
					return;
				}*/
			}
	}


    // Add the call to is_char_red
    if (check_red_flag) {
        DEBUGLOG("Entering is_char_red\n");
        fncall_param_val = fold_build2_loc (location, MEM_REF, ptr_type_node, addr, \
                            build_int_cst(build_pointer_type(type), 0));
        fncall_param_val = fold_convert_loc (location, unsigned_type_node, fncall_param_val);
        is_char_red_call = gimple_build_call (lbc_is_char_red_fndecl, 3, fncall_param_val, size, \
                            fold_convert_loc(location, ptr_type_node, addr));
        gimple_set_location (is_char_red_call, location);
        //debug_gimple_stmt(is_char_red_call);
        gsi_insert_before (iter, is_char_red_call, GSI_SAME_STMT);
        DEBUGLOG("Done with is_char_red\n");
    }
    DEBUGLOG("Exiting derefs \n");
}
Example #19
static tree
get_init_field (segment_info *head, tree union_type, tree *field_init,
		record_layout_info rli)
{
  segment_info *s;
  HOST_WIDE_INT length = 0;
  HOST_WIDE_INT offset = 0;
  unsigned HOST_WIDE_INT known_align, desired_align;
  bool overlap = false;
  tree tmp, field;
  tree init;
  unsigned char *data, *chk;
  VEC(constructor_elt,gc) *v = NULL;

  tree type = unsigned_char_type_node;
  int i;

  /* Obtain the size of the union and check if there are any overlapping
     initializers.  */
  for (s = head; s; s = s->next)
    {
      HOST_WIDE_INT slen = s->offset + s->length;
      if (s->sym->value)
	{
	  if (s->offset < offset)
            overlap = true;
	  offset = slen;
	}
      length = length < slen ? slen : length;
    }

  if (!overlap)
    return NULL_TREE;

  /* Now absorb all the initializer data into a single vector,
     whilst checking for overlapping, unequal values.  */
  data = XCNEWVEC (unsigned char, (size_t)length);
  chk = XCNEWVEC (unsigned char, (size_t)length);

  /* TODO - change this when default initialization is implemented.  */
  memset (data, '\0', (size_t)length);
  memset (chk, '\0', (size_t)length);
  for (s = head; s; s = s->next)
    if (s->sym->value)
      gfc_merge_initializers (s->sym->ts, s->sym->value,
			      &data[s->offset],
			      &chk[s->offset],
			     (size_t)s->length);
  
  for (i = 0; i < length; i++)
    CONSTRUCTOR_APPEND_ELT (v, NULL, build_int_cst (type, data[i]));

  free (data);
  free (chk);

  /* Build a char[length] array to hold the initializers.  Much of what
     follows is borrowed from build_field, above.  */

  tmp = build_int_cst (gfc_array_index_type, length - 1);
  tmp = build_range_type (gfc_array_index_type,
			  gfc_index_zero_node, tmp);
  tmp = build_array_type (type, tmp);
  field = build_decl (gfc_current_locus.lb->location,
		      FIELD_DECL, NULL_TREE, tmp);

  known_align = BIGGEST_ALIGNMENT;

  desired_align = update_alignment_for_field (rli, field, known_align);
  if (desired_align > known_align)
    DECL_PACKED (field) = 1;

  DECL_FIELD_CONTEXT (field) = union_type;
  DECL_FIELD_OFFSET (field) = size_int (0);
  DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node;
  SET_DECL_OFFSET_ALIGN (field, known_align);

  rli->offset = size_binop (MAX_EXPR, rli->offset,
                            size_binop (PLUS_EXPR,
                                        DECL_FIELD_OFFSET (field),
                                        DECL_SIZE_UNIT (field)));

  init = build_constructor (TREE_TYPE (field), v);
  TREE_CONSTANT (init) = 1;

  *field_init = init;

  for (s = head; s; s = s->next)
    {
      if (s->sym->value == NULL)
	continue;

      gfc_free_expr (s->sym->value);
      s->sym->value = NULL;
    }

  return field;
}
Example #20
static bool
generate_memset_zero (gimple stmt, tree op0, tree nb_iter,
		      gimple_stmt_iterator bsi)
{
  tree t, addr_base;
  tree nb_bytes = NULL;
  bool res = false;
  gimple_seq stmts = NULL, stmt_list = NULL;
  gimple fn_call;
  tree mem, fndecl, fntype, fn;
  gimple_stmt_iterator i;
  ssa_op_iter iter;
  struct data_reference *dr = XCNEW (struct data_reference);

  DR_STMT (dr) = stmt;
  DR_REF (dr) = op0;
  if (!dr_analyze_innermost (dr))
    goto end;

  /* Test for a positive stride, iterating over every element.  */
  if (integer_zerop (fold_build2 (MINUS_EXPR, integer_type_node, DR_STEP (dr),
				  TYPE_SIZE_UNIT (TREE_TYPE (op0)))))
    {
      tree offset = fold_convert (sizetype,
				  size_binop (PLUS_EXPR,
					      DR_OFFSET (dr),
					      DR_INIT (dr)));
      addr_base = fold_build2 (POINTER_PLUS_EXPR,
			       TREE_TYPE (DR_BASE_ADDRESS (dr)),
			       DR_BASE_ADDRESS (dr), offset);
    }

  /* Test for a negative stride, iterating over every element.  */
  else if (integer_zerop (fold_build2 (PLUS_EXPR, integer_type_node,
				       TYPE_SIZE_UNIT (TREE_TYPE (op0)),
				       DR_STEP (dr))))
    {
      nb_bytes = build_size_arg (nb_iter, op0, &stmt_list);
      addr_base = size_binop (PLUS_EXPR, DR_OFFSET (dr), DR_INIT (dr));
      addr_base = fold_build2 (MINUS_EXPR, sizetype, addr_base, nb_bytes);
      addr_base = force_gimple_operand (addr_base, &stmts, true, NULL);
      gimple_seq_add_seq (&stmt_list, stmts);

      addr_base = fold_build2 (POINTER_PLUS_EXPR,
			       TREE_TYPE (DR_BASE_ADDRESS (dr)),
			       DR_BASE_ADDRESS (dr), addr_base);
    }
  else
    goto end;

  mem = force_gimple_operand (addr_base, &stmts, true, NULL);
  gimple_seq_add_seq (&stmt_list, stmts);

  fndecl = implicit_built_in_decls [BUILT_IN_MEMSET];
  fntype = TREE_TYPE (fndecl);
  fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  if (!nb_bytes)
      nb_bytes = build_size_arg (nb_iter, op0, &stmt_list);
  fn_call = gimple_build_call (fn, 3, mem, integer_zero_node, nb_bytes);
  gimple_seq_add_stmt (&stmt_list, fn_call);

  for (i = gsi_start (stmt_list); !gsi_end_p (i); gsi_next (&i))
    {
      gimple s = gsi_stmt (i);
      update_stmt_if_modified (s);

      FOR_EACH_SSA_TREE_OPERAND (t, s, iter, SSA_OP_VIRTUAL_DEFS)
	{
	  if (TREE_CODE (t) == SSA_NAME)
	    t = SSA_NAME_VAR (t);
	  mark_sym_for_renaming (t);
	}
    }

  /* Mark also the uses of the VDEFS of STMT to be renamed.  */
  FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_VIRTUAL_DEFS)
    {
      if (TREE_CODE (t) == SSA_NAME)
	{
	  gimple s;
	  imm_use_iterator imm_iter;

	  FOR_EACH_IMM_USE_STMT (s, imm_iter, t)
	    update_stmt (s);

	  t = SSA_NAME_VAR (t);
	}
      mark_sym_for_renaming (t);
    }

  gsi_insert_seq_after (&bsi, stmt_list, GSI_CONTINUE_LINKING);
  res = true;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "generated memset zero\n");

  todo |= TODO_rebuild_alias;

 end:
  free_data_ref (dr);
  return res;
}