Пример #1
0
/* Allocate one object from slab cache C.
   The cache spinlock is held for the whole operation, so this is safe
   against concurrent alloc/free on the same cache.  If C->init is set,
   the returned object is seeded from that template.
   NOTE(review): create() is assumed to never return NULL here -- the
   c->first->next store below would crash otherwise; confirm.  */
void *slab_cache_alloc(slab_cache_t *c) {
  spinlock_acquire(&c->lock);

  void *obj;
  if (c->empty) {
    /* Fast path: hand out the cached known-free object.  */
    obj = c->empty;
    slab_footer_t *f = FOOTER_FOR_PTR(obj);
    mark_used(c, f, obj);

    /* Re-seed c->empty: scan this slab and its successors for the next
       free object.  c->empty is left NULL when every slab is full.  */
    while (f) {
      c->empty = find_empty_obj(c, f);
      if (c->empty) break;
      f = f->next;
    }

  } else {

    /* No empty pointer - must create a new slab. */
    /* Push the new slab at the head of the slab list.  */
    slab_footer_t *f = c->first;
    c->first = create(c);
    c->first->next = f;
    
    obj = (void*)START_FOR_FOOTER(c->first);
    mark_used(c, c->first, obj);

    /* Cache the next free object from the fresh slab.  */
    c->empty = find_empty_obj(c, c->first);

  }
  /* Copy the per-cache initialization template, if any.  */
  if (c->init)
    memcpy(obj, c->init, c->size);

  spinlock_release(&c->lock);
  return obj;
}
Пример #2
0
/* Finish an exception-specification block: pop its statement list and,
   unless it is a MUST_NOT_THROW_EXPR, record the (stripped) list of
   types the specification allows.  */
void
finish_eh_spec_block (tree raw_raises, tree eh_spec_block)
{
  tree raises = NULL_TREE;

  TREE_OPERAND (eh_spec_block, 0)
    = pop_stmt_list (TREE_OPERAND (eh_spec_block, 0));

  if (TREE_CODE (eh_spec_block) == MUST_NOT_THROW_EXPR)
    return;

  /* Strip cv quals, etc, from the specification types.  */
  while (raw_raises && TREE_VALUE (raw_raises))
    {
      tree type = prepare_eh_type (TREE_VALUE (raw_raises));
      tree tinfo = eh_type_info (type);

      mark_used (tinfo);
      raises = tree_cons (NULL_TREE, type, raises);
      raw_raises = TREE_CHAIN (raw_raises);
    }

  EH_SPEC_RAISES (eh_spec_block) = raises;
}
Пример #3
0
/* Return the address of TYPE's typeinfo decl, converted to
   type_info*, after flagging the decl as used.  */
static tree
get_tinfo_ptr (tree type)
{
  tree decl = get_tinfo_decl (type);
  tree addr;

  mark_used (decl);
  addr = build_address (decl);
  return build_nop (type_info_ptr_type, addr);
}
Пример #4
0
/* Return the rectangle (x0,y0,w0,h0) to free space, provided a tracked
   block with exactly that geometry exists.  The whole area is first
   marked unused; any portion that still overlaps another live block is
   then re-marked as used, so shared area is never freed prematurely.  */
void freespace_block_add(freespace* fs, int x0, int y0, int w0, int h0 )
{
    int i;

    for (i = 0; i < MAX_BLOCK_AREAS; ++i)
    {
        /* search for existing block which matches */

        if (fs->blklist[i].w
         && fs->blklist[i].x == x0
         && fs->blklist[i].y == y0
         && fs->blklist[i].w == w0
         && fs->blklist[i].h == h0)
        {
            int j;

            /* w == 0 doubles as the "slot unused" flag.  */
            fs->blklist[i].w = 0;

            /* set the area to unused space */

            mark_unused(fs->row_buf,        fs->col_buf,
                        fs->blk_row_buf,    fs->blk_col_buf,
                        fs->nat_row_buf,    fs->nat_col_buf,
                        x0,     y0,     w0,     h0);

            /* check for intersections with other blocks */

            for (j = 0; j < MAX_BLOCK_AREAS; ++j)
            {
                if (fs->blklist[j].w == 0)
                    continue;

                if (j == i)
                    continue; /* never test the freed block against itself */
                {
                    /* Clip block j against the freed rectangle.  */
                    int ix0 = MAX(fs->blklist[j].x, x0);
                    int iy0 = MAX(fs->blklist[j].y, y0);
                    int ix1 = MIN(x0 + w0,  fs->blklist[j].x
                                          + fs->blklist[j].w);
                    int iy1 = MIN(y0 + h0,  fs->blklist[j].y
                                          + fs->blklist[j].h);

                    /* Non-empty intersection: that area still belongs
                       to block j, so flag it used again.  */
                    if (ix1 > ix0 && iy1 > iy0)
                    {
                        mark_used(  fs->row_buf,        fs->col_buf,
                                    fs->blk_row_buf,    fs->blk_col_buf,
                                    ix0,    iy0,    ix1 - ix0,  iy1 - iy0);
                    }
                }
            }
        }
    }
}
Пример #5
0
/* Build an ADDR_EXPR (as ptr_type_node) pointing at TYPE's exception
   typeinfo, or NULL when the type has none.  */
static tree
build_eh_type_type (tree type)
{
  tree info = eh_type_info (type);

  if (info == NULL)
    return NULL;

  mark_used (info);
  return build1 (ADDR_EXPR, ptr_type_node, info);
}
Пример #6
0
/* Return TYPE's exception typeinfo address converted to ptr_type_node,
   or NULL when the type has no typeinfo.  */
static tree
build_eh_type_type (tree type)
{
  tree info = eh_type_info (type);

  if (info == NULL)
    return NULL;

  mark_used (info);
  return convert (ptr_type_node, build_address (info));
}
void SynopsisMD2::learn_from_tn(vector<int> low,vector<int> high)
{
	// Weight updates only make sense when clock bits are configured;
	// otherwise there is nothing to learn.
	if(clock_bits==0)
		return;

	// Increment the weights of the leaves covering [low, high].
	mark_used(low,high);
}
/**
 * Walk every pair instruction in the program, reset the Used flag on all
 * of its sources, then re-mark only the sources actually referenced by
 * the instruction (including presubtract sources).
 */
void rc_pair_remove_dead_sources(struct radeon_compiler * c, void *user)
{
	struct rc_instruction * inst = c->Program.Instructions.Next;

	while (inst != &c->Program.Instructions) {
		/* Only pair instructions have per-source Used flags.  */
		if (inst->Type != RC_INSTRUCTION_NORMAL) {
			unsigned int src;

			/* Assume every source is dead... */
			for (src = 0; src < 4; src++) {
				inst->U.P.RGB.Src[src].Used = 0;
				inst->U.P.Alpha.Src[src].Used = 0;
			}

			/* ...then resurrect the ones that are referenced. */
			mark_used(inst, &inst->U.P.RGB);
			mark_used(inst, &inst->U.P.Alpha);

			mark_used_presub(&inst->U.P.RGB);
			mark_used_presub(&inst->U.P.Alpha);
		}
		inst = inst->Next;
	}
}
Пример #9
0
/* Try to emit a definition for the typeinfo variable DECL (asserted to
   be DECL_TINFO_P).  Handles library-provided typeinfo, typeinfo for
   incomplete types (forced to internal linkage), and the normal case.
   Returns true iff a definition was actually written out here.  */
bool
emit_tinfo_decl (tree decl)
{
  tree type = TREE_TYPE (DECL_NAME (decl));
  int in_library = typeinfo_in_lib_p (type);
  tree var_desc, var_init;

  gcc_assert (DECL_TINFO_P (decl));

  if (in_library)
    {
      if (doing_runtime)
	DECL_EXTERNAL (decl) = 0;
      else
	{
	  /* If we're not in the runtime, then DECL (which is already
	     DECL_EXTERNAL) will not be defined here.  */
	  DECL_INTERFACE_KNOWN (decl) = 1;
	  return false;
	}
    }
  else if (involves_incomplete_p (type))
    {
      if (!decl_needed_p (decl))
	return false;
      /* If TYPE involves an incomplete class type, then the typeinfo
	 object will be emitted with internal linkage.  There is no
	 way to know whether or not types are incomplete until the end
	 of the compilation, so this determination must be deferred
	 until this point.  */
      TREE_PUBLIC (decl) = 0;
      DECL_EXTERNAL (decl) = 0;
      DECL_INTERFACE_KNOWN (decl) = 1;
    }

  import_export_decl (decl);
  if (DECL_NOT_REALLY_EXTERN (decl) && decl_needed_p (decl))
    {
      /* Build the initializer and finish the declaration, which emits
	 the definition.  */
      DECL_EXTERNAL (decl) = 0;
      var_desc = get_pseudo_ti_desc (type);
      var_init = get_pseudo_ti_init (type, var_desc);
      DECL_INITIAL (decl) = var_init;
      mark_used (decl);
      cp_finish_decl (decl, var_init, NULL_TREE, 0);
      return true;
    }
  else
    return false;
}
Пример #10
0
/* Instrument a vptr access for -fsanitize=vptr.  OP is the object (or
   its address if IS_ADDR), TYPE the static type, CKIND the kind of
   check.  Builds a call to the internal UBSAN_VPTR function carrying
   the object's vptr, a 64-bit hash of TYPE's mangled name, and TYPE's
   typeinfo, then returns a COMPOUND_EXPR that performs the check and
   yields OP.  */
static tree
cp_ubsan_instrument_vptr (location_t loc, tree op, tree type, bool is_addr,
			  enum ubsan_null_ckind ckind)
{
  type = TYPE_MAIN_VARIANT (type);
  /* Combine two different hashes of the mangled type name into one
     64-bit value used by the runtime to identify the type.  */
  const char *mangled = mangle_type_string (type);
  hashval_t str_hash1 = htab_hash_string (mangled);
  hashval_t str_hash2 = iterative_hash (mangled, strlen (mangled), 0);
  tree str_hash = wide_int_to_tree (uint64_type_node,
				    wi::uhwi (((uint64_t) str_hash1 << 32)
					      | str_hash2, 64));
  if (!is_addr)
    op = build_fold_addr_expr_loc (loc, op);
  op = save_expr (op);
  /* Load the vptr through TYPE's vfield and widen it to uint64.  */
  tree vptr = fold_build3_loc (loc, COMPONENT_REF,
			       TREE_TYPE (TYPE_VFIELD (type)),
			       build_fold_indirect_ref_loc (loc, op),
			       TYPE_VFIELD (type), NULL_TREE);
  vptr = fold_convert_loc (loc, pointer_sized_int_node, vptr);
  vptr = fold_convert_loc (loc, uint64_type_node, vptr);
  if (ckind == UBSAN_DOWNCAST_POINTER)
    {
      /* For pointer downcasts only read the vptr when OP is non-null.  */
      tree cond = build2_loc (loc, NE_EXPR, boolean_type_node, op,
			      build_zero_cst (TREE_TYPE (op)));
      /* This is a compiler generated comparison, don't emit
	 e.g. -Wnonnull-compare warning for it.  */
      TREE_NO_WARNING (cond) = 1;
      vptr = build3_loc (loc, COND_EXPR, uint64_type_node, cond,
			 vptr, build_int_cst (uint64_type_node, 0));
    }
  tree ti_decl = get_tinfo_decl (type);
  mark_used (ti_decl);
  tree ptype = build_pointer_type (type);
  tree call
    = build_call_expr_internal_loc (loc, IFN_UBSAN_VPTR,
				    void_type_node, 5, op, vptr, str_hash,
				    build_address (ti_decl),
				    build_int_cst (ptype, ckind));
  TREE_SIDE_EFFECTS (call) = 1;
  return fold_build2 (COMPOUND_EXPR, TREE_TYPE (op), call, op);
}
Пример #11
0
/* Finish an EH-specification block: pop its statement list and record
   the list of allowed exception types with cv-qualifiers stripped.  */
void
finish_eh_spec_block (tree raw_raises, tree eh_spec_block)
{
  tree raises = NULL_TREE;

  EH_SPEC_STMTS (eh_spec_block) = pop_stmt_list (EH_SPEC_STMTS (eh_spec_block));

  /* Strip cv quals, etc, from the specification types.  */
  while (raw_raises && TREE_VALUE (raw_raises))
    {
      tree type = prepare_eh_type (TREE_VALUE (raw_raises));
      tree tinfo = eh_type_info (type);

      mark_used (tinfo);
      raises = tree_cons (NULL_TREE, type, raises);
      raw_raises = TREE_CHAIN (raw_raises);
    }

  EH_SPEC_RAISES (eh_spec_block) = raises;
}
Пример #12
0
/* Reserve the rectangle (x0,y0,w0,h0) as a tracked block: claim the
   first unused slot in the block list and mark the area used in the
   plain and block usage buffers.  Returns false when the list is full. */
bool freespace_block_remove(freespace* fs, int x0, int y0, int w0, int h0)
{
    int slot;

    for (slot = 0; slot < MAX_BLOCK_AREAS; ++slot)
    {
        /* w != 0 means the slot is already occupied.  */
        if (fs->blklist[slot].w != 0)
            continue;

        fs->blklist[slot].x = x0;
        fs->blklist[slot].y = y0;
        fs->blklist[slot].w = w0;
        fs->blklist[slot].h = h0;

        mark_used(  fs->row_buf,        fs->col_buf,
                    fs->blk_row_buf,    fs->blk_col_buf,
                    x0,     y0,     w0,     h0);

        return true;
    }

    return false;
}
Пример #13
0
/* Scan INFILE in blocksize-sized chunks and record the byte offset of
   every block that begins with a JPEG SOI marker (FF D8 FF).  Offsets
   are returned through *OFFSETS (heap-allocated, grown as needed; the
   caller frees it) and each hit is also reported via mark_used().
   Returns the number of headers found (0 on allocation failure).
   NOTE(review): a trailing partial read is ignored, matching the
   original behaviour -- confirm that is intended.  */
int find_jpeg_headers(struct jpeg_decompress_struct *cinfo,
                      FILE *infile, size_t blocksize, size_t **offsets) {
  int count = 0;
  int size = 10;
  char inbuf[blocksize];
  size_t total_offset = 0;
  size_t read;

  *offsets = malloc(size * sizeof(size_t));
  if (*offsets == NULL)
    return 0;

  while ((read = fread(inbuf, 1, blocksize, infile)) == blocksize) {
    /* Compare the first three bytes against the SOI marker bytewise.
       The old test dereferenced the buffer as an unsigned int (an
       unaligned, strict-aliasing-violating read) and masked with
       0x00FFFFFFF -- nine hex digits, so a nibble of the fourth byte
       leaked into the comparison.  */
    if ((unsigned char)inbuf[0] == 0xFF &&
        (unsigned char)inbuf[1] == 0xD8 &&
        (unsigned char)inbuf[2] == 0xFF) {
      printf("count is %d\n", count);
      /* it's probably a jpeg. */
      if (size == count) {
        /* Grow via a temporary so the old array is not leaked when
           realloc fails.  */
        size_t *tmp = realloc(*offsets, 2 * (size_t)size * sizeof(size_t));
        if (tmp == NULL)
          return count;
        *offsets = tmp;
        size = size * 2;
      }
      /* Was `*offsets[count]`, which parses as *(offsets[count]) and
         indexes the POINTER, stomping unrelated memory for count > 0.  */
      (*offsets)[count] = total_offset;
      mark_used((blockrecord)cinfo->src, total_offset);
      count++;
    }
    total_offset += blocksize;
  }
  return count;
}
Пример #14
0
/* Process multiboot info MBD: walk the boot-module list and install any
   module whose command line identifies it as a font.
   NOTE(review): the panic() below runs before any module handling,
   making the rest of this branch unreachable -- looks like a leftover
   debug trap; confirm before removing.
   NOTE(review): this snippet is truncated here; the closing braces of
   the function lie outside this view.  */
void do_multiboot(struct mboot_info *mbd) {
	if (mbd->flags & MBI_MODINFO) { // module info
		panic();
		k_swrite("modinfo\n", OUT_STD);
		k_iwrite (mbd->module_info.mod_count, OUT_STD);
		k_swrite("\n",OUT_STD);
		for (int i = 0; i < mbd->module_info.mod_count; i++) {
			/* Skip past the first space in the module id string to
			   find the argument portion of the command line.  */
			char* buf = mbd->module_info.mod_addr[i].module_id;
			buf++;
			while (*(buf) && *(buf-1) != ' ') buf++;
			printf ("Loading module [%s]...", buf);
			//k_swrite("Loading module ", OUT_STD);
			//k_swrite(buf, OUT_STD);
			if (strncmp("font ", buf, 5) == 0) {
				/* Font module: point the global font at its payload.  */
				struct font_t* fnt = mbd->module_info.mod_addr[i].start;
				font = fnt;
				//set_font(mbd->module_info.mod_addr[i].start, buf[5] - '0');
				k_swrite(" \e[1;34mDONE\e[0m\n", OUT_STD);
				printf ("<w:%d h:%d bw:%d gs:%d ng:%d>\n", fnt->w, fnt->h, fnt->w_byte, fnt->glyph_size, fnt->nGlyphs);
				mark_used (mbd->module_info.mod_addr[i].start,mbd->module_info.mod_addr[i].end - mbd->module_info.mod_addr[i].start);
			} else {
				k_swrite(" \e[1;31mUnknown module name!\e[0m\n", OUT_STD);
			}
		}
Пример #15
0
/* Write DISC out as a (possibly split) WBFS image rooted at OUTPUT_FILE.
   Partitions rejected by TYPE_FILTER are skipped and the on-disc
   partition table is patched to match.  Returns 0 on success, 1 on any
   failure.
   NOTE(review): the read-error path below tears down wbfs_wrt via
   wbfs_wrt_free(), but the later write-error paths return without doing
   so -- looks like a resource leak; confirm.  */
int wiiso_save_wbfs(wiiso_t disc, const char *output_file, int type_filter, off_t split_size) {
    static const uint8_t de_bruijn[] = { 0, 1, 28, 2, 29, 14, 24, 3, 30, 22, 20, 15, 25, 17, 4, 8, 31, 27, 13, 23, 21, 19, 16, 7, 26, 12, 18, 6, 11, 5, 10, 9};
    uint8_t wbfshdr[16] = "WBFSaCaB\x09\x00\x00\x00\x01\x00\x00\x00", *sector;
    struct wbfs_table *wtbl = NULL;
    unsigned int i, k, todump = 0;
    struct _wbfs_wrt wbfs_wrt;

    fail_if_closed();

    /* Mark every kept partition's sectors as used in the WBFS table.  */
    for(i=0; i<disc->num_partitions; i++) {
	if(to_be_skipped(disc, i, type_filter))
	    continue;
	todump++;
	if(mark_partition_used(&wtbl, disc, i)) {
	    log_error(disc->log_level, "wiiso_save_wbfs: failed to mark partition %u as used\n", i);
	    return 1;
	}
    }

    /* The ISO header, partition table region and per-partition entries
       must always be carried over.  */
    if(mark_used(&wtbl, 0, sizeof(struct iso_header), disc->log_level)) {
	log_error(disc->log_level, "wiiso_save_wbfs: failed to mark iso header as used\n");
	return 1;
    }

    if(mark_used(&wtbl, 0x40000, 0x8000 * 2, disc->log_level)) {
	log_error(disc->log_level, "wiiso_save_wbfs: failed to mark partition table as used\n");
	return 1;
    }

    if(mark_used(&wtbl, disc->off_to_part_tbl, 8*todump, disc->log_level)) {
	log_error(disc->log_level, "wiiso_save_wbfs: failed to mark partition table as used\n");
	return 1;
    }

    if(!(sector = malloc(wtbl->sector_size))) {
	log_error(disc->log_level, "wiiso_save_wbfs: out of memory when allocating read/write buffer (%u bytes)\n", wtbl->sector_size);
	free(wtbl);
	return 1;
    }

    memset(&wbfs_wrt, 0, sizeof(wbfs_wrt));
    wbfs_wrt.split_size = split_size;
    wbfs_wrt.filename[0] = (char *)output_file;

    /* Copy every used sector; sec_table[i] is rewritten in place to the
       (1-based, big-endian) sector number it lands on in the WBFS file. */
    for(i=0, k=1; i<wtbl->num_entries; i++) {
	if(!wtbl->sec_table[i])
	    continue;

	wtbl->sec_table[i] = htons(k);

	if(disc_iso_read_with_blanks(disc, (off_t)i * wtbl->sector_size, sector, wtbl->sector_size, 1)) {
	    log_error(disc->log_level, "wiiso_save_wbfs: failed to read entry %x (sector %llx)\n", i, (unsigned long long)i * wtbl->sector_size);
	    wbfs_wrt_free(&wbfs_wrt, 1);
	    free(sector);
	    free(wtbl);
	    return 1;
	}

	if(write_wbfs_part(sector, wtbl->sector_size, (off_t)k * wtbl->sector_size, &wbfs_wrt, disc->log_level)) {
	    free(sector);
	    free(wtbl);
	    return 1;
	}

	/* The first used sector also supplies the 0x100-byte disc header
	   copy stored at offset 0x200.  */
	if(!i && write_wbfs_part(sector, 0x100, 0x200, &wbfs_wrt, disc->log_level)) {
	    free(sector);
	    free(wtbl);
	    return 1;
	}
	k++;
    }

    /* Write the sector lookup table at 0x300.  */
    if(write_wbfs_part(wtbl->sec_table, wtbl->num_entries * sizeof(uint16_t), 0x300, &wbfs_wrt, disc->log_level)) {
	free(sector);
	free(wtbl);
	return 1;
    }

    /* Total image size in 512-byte units, stored big-endian in the header.  */
    split_size = k;
    split_size *= wtbl->sector_size;
    split_size /= 0x200;
    wbfshdr[4] = split_size>>24;
    wbfshdr[5] = split_size>>16;
    wbfshdr[6] = split_size>>8;
    wbfshdr[7] = split_size;

    /* de Bruijn multiply/shift bit-scan: stores log2(sector_size)
       (sector_size is presumably a power of two -- confirm).  */
    wbfshdr[9] = de_bruijn[(wtbl->sector_size * 0x077cb531) >> 27];

    if(write_wbfs_part(wbfshdr, 16, 0, &wbfs_wrt, disc->log_level)) {
	free(sector);
	free(wtbl);
	return 1;
    }

    /* If partitions were filtered out, patch the partition count and the
       per-partition entries inside the already-written image.  */
    if(todump != disc->num_partitions) {
	struct _uint_pair uint_pair;
	off_t part_tbl = ntohs(wtbl->sec_table[0x40000 / wtbl->sector_size]);
	part_tbl *= wtbl->sector_size;
	part_tbl += 0x40000 % wtbl->sector_size;
	uint_pair.uint0 = htonl(todump);

 	if(write_wbfs_part(&uint_pair.uint0, sizeof(uint_pair.uint0), part_tbl, &wbfs_wrt, disc->log_level)) {
	    free(sector);
	    free(wtbl);
	    return 1;
	}

	part_tbl = ntohs(wtbl->sec_table[disc->off_to_part_tbl / wtbl->sector_size]);
	part_tbl *= wtbl->sector_size;
	part_tbl += disc->off_to_part_tbl % wtbl->sector_size;

	for(i=0; i<disc->num_partitions; i++) {
	    if(to_be_skipped(disc, i, type_filter))
		continue;

	    uint_pair.uint0 = htonl(disc->partitions[i].offset_to_partition >> 2);
	    uint_pair.uint1 = htonl(disc->partitions[i].partition_type);

	    if(write_wbfs_part(&uint_pair, sizeof(uint_pair), part_tbl, &wbfs_wrt, disc->log_level)) {
		free(sector);
		free(wtbl);
		return 1;
	    }
	    part_tbl += sizeof(uint_pair);
	}
    }

    wbfs_wrt_free(&wbfs_wrt, 0);
    free(sector);
    free(wtbl);

    return 0;
}
Пример #16
0
/* Build a THROW_EXPR for `throw EXP'.  EXP == NULL_TREE means a rethrow
   (lowered to __cxa_rethrow); otherwise the exception object is
   allocated, initialized (preferring a move constructor for eligible
   local variables), and handed to __cxa_throw together with its
   typeinfo and destructor.  Inside a template only a minimal THROW_EXPR
   is built.  Returns the THROW_EXPR, or error_mark_node on failure.  */
tree
build_throw (tree exp)
{
  if (exp == error_mark_node)
    return exp;

  if (processing_template_decl)
    {
      if (cfun)
	current_function_returns_abnormally = 1;
      exp = build_min (THROW_EXPR, void_type_node, exp);
      SET_EXPR_LOCATION (exp, input_location);
      return exp;
    }

  if (exp && null_node_p (exp))
    warning (0, "throwing NULL, which has integral, not pointer type");

  if (exp != NULL_TREE)
    {
      if (!is_admissible_throw_operand_or_catch_parameter (exp, true))
	return error_mark_node;
    }

  if (! doing_eh ())
    return error_mark_node;

  if (exp)
    {
      tree throw_type;
      tree temp_type;
      tree cleanup;
      tree object, ptr;
      tree tmp;
      tree allocate_expr;

      /* The CLEANUP_TYPE is the internal type of a destructor.  */
      if (!cleanup_type)
	{
	  tmp = build_function_type_list (void_type_node,
					  ptr_type_node, NULL_TREE);
	  cleanup_type = build_pointer_type (tmp);
	}

      if (!throw_fn)
	{
	  tree name = get_identifier ("__cxa_throw");
	  throw_fn = get_global_binding (name);
	  if (!throw_fn)
	    {
	      /* Declare void __cxa_throw (void*, void*, void (*)(void*)).  */
	      /* ??? Second argument is supposed to be "std::type_info*".  */
	      tmp = build_function_type_list (void_type_node,
					      ptr_type_node, ptr_type_node,
					      cleanup_type, NULL_TREE);
	      throw_fn = push_throw_library_fn (name, tmp);

	      if (flag_tm)
		{
		  tree itm_name = get_identifier ("_ITM_cxa_throw");
		  tree itm_fn = get_global_binding (itm_name);
		  if (!itm_fn)
		    itm_fn = push_throw_library_fn (itm_name, tmp);
		  apply_tm_attr (itm_fn, get_identifier ("transaction_pure"));
		  record_tm_replacement (throw_fn, itm_fn);
		}
	    }
	}

      /* [except.throw]

	 A throw-expression initializes a temporary object, the type
	 of which is determined by removing any top-level
	 cv-qualifiers from the static type of the operand of throw
	 and adjusting the type from "array of T" or "function return
	 T" to "pointer to T" or "pointer to function returning T"
	 respectively.  */
      temp_type = is_bitfield_expr_with_lowered_type (exp);
      if (!temp_type)
	temp_type = cv_unqualified (type_decays_to (TREE_TYPE (exp)));

      /* OK, this is kind of wacky.  The standard says that we call
	 terminate when the exception handling mechanism, after
	 completing evaluation of the expression to be thrown but
	 before the exception is caught (_except.throw_), calls a
	 user function that exits via an uncaught exception.

	 So we have to protect the actual initialization of the
	 exception object with terminate(), but evaluate the
	 expression first.  Since there could be temps in the
	 expression, we need to handle that, too.  We also expand
	 the call to __cxa_allocate_exception first (which doesn't
	 matter, since it can't throw).  */

      /* Allocate the space for the exception.  */
      allocate_expr = do_allocate_exception (temp_type);
      allocate_expr = get_target_expr (allocate_expr);
      ptr = TARGET_EXPR_SLOT (allocate_expr);
      TARGET_EXPR_CLEANUP (allocate_expr) = do_free_exception (ptr);
      CLEANUP_EH_ONLY (allocate_expr) = 1;

      object = build_nop (build_pointer_type (temp_type), ptr);
      object = cp_build_fold_indirect_ref (object);

      /* And initialize the exception object.  */
      if (CLASS_TYPE_P (temp_type))
	{
	  int flags = LOOKUP_NORMAL | LOOKUP_ONLYCONVERTING;
	  vec<tree, va_gc> *exp_vec;
	  bool converted = false;

	  /* Under C++0x [12.8/16 class.copy], a thrown lvalue is sometimes
	     treated as an rvalue for the purposes of overload resolution
	     to favor move constructors over copy constructors.  */
	  if (/* Must be a local, automatic variable.  */
	      VAR_P (exp)
	      && DECL_CONTEXT (exp) == current_function_decl
	      && ! TREE_STATIC (exp)
	      /* The variable must not have the `volatile' qualifier.  */
	      && !(cp_type_quals (TREE_TYPE (exp)) & TYPE_QUAL_VOLATILE))
	    {
	      tree moved = move (exp);
	      exp_vec = make_tree_vector_single (moved);
	      moved = (build_special_member_call
		       (object, complete_ctor_identifier, &exp_vec,
			TREE_TYPE (object), flags|LOOKUP_PREFER_RVALUE,
			tf_none));
	      release_tree_vector (exp_vec);
	      if (moved != error_mark_node)
		{
		  exp = moved;
		  converted = true;
		}
	    }

	  /* Call the copy constructor.  */
	  if (!converted)
	    {
	      exp_vec = make_tree_vector_single (exp);
	      exp = (build_special_member_call
		     (object, complete_ctor_identifier, &exp_vec,
		      TREE_TYPE (object), flags, tf_warning_or_error));
	      release_tree_vector (exp_vec);
	    }

	  if (exp == error_mark_node)
	    {
	      error ("  in thrown expression");
	      return error_mark_node;
	    }
	}
      else
	{
	  tmp = decay_conversion (exp, tf_warning_or_error);
	  if (tmp == error_mark_node)
	    return error_mark_node;
	  exp = build2 (INIT_EXPR, temp_type, object, tmp);
	}

      /* Mark any cleanups from the initialization as MUST_NOT_THROW, since
	 they are run after the exception object is initialized.  */
      cp_walk_tree_without_duplicates (&exp, wrap_cleanups_r, 0);

      /* Prepend the allocation.  */
      exp = build2 (COMPOUND_EXPR, TREE_TYPE (exp), allocate_expr, exp);

      /* Force all the cleanups to be evaluated here so that we don't have
	 to do them during unwinding.  */
      exp = build1 (CLEANUP_POINT_EXPR, void_type_node, exp);

      throw_type = build_eh_type_type (prepare_eh_type (TREE_TYPE (object)));

      cleanup = NULL_TREE;
      if (type_build_dtor_call (TREE_TYPE (object)))
	{
	  tree dtor_fn = lookup_fnfields (TYPE_BINFO (TREE_TYPE (object)),
					  complete_dtor_identifier, 0);
	  dtor_fn = BASELINK_FUNCTIONS (dtor_fn);
	  mark_used (dtor_fn);
	  if (TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (object)))
	    {
	      cxx_mark_addressable (dtor_fn);
	      /* Pretend it's a normal function.  */
	      cleanup = build1 (ADDR_EXPR, cleanup_type, dtor_fn);
	    }
	}
      if (cleanup == NULL_TREE)
	cleanup = build_int_cst (cleanup_type, 0);

      /* ??? Indicate that this function call throws throw_type.  */
      tmp = cp_build_function_call_nary (throw_fn, tf_warning_or_error,
					 ptr, throw_type, cleanup, NULL_TREE);

      /* Tack on the initialization stuff.  */
      exp = build2 (COMPOUND_EXPR, TREE_TYPE (tmp), exp, tmp);
    }
  else
    {
      /* Rethrow current exception.  */
      if (!rethrow_fn)
	{
	  tree name = get_identifier ("__cxa_rethrow");
	  rethrow_fn = get_global_binding (name);
	  if (!rethrow_fn)
	    /* Declare void __cxa_rethrow (void).  */
	    rethrow_fn = push_throw_library_fn
	      (name, build_function_type_list (void_type_node, NULL_TREE));

	  if (flag_tm)
	    apply_tm_attr (rethrow_fn, get_identifier ("transaction_pure"));
	}

      /* ??? Indicate that this function call allows exceptions of the type
	 of the enclosing catch block (if known).  */
      exp = cp_build_function_call_vec (rethrow_fn, NULL, tf_warning_or_error);
    }

  exp = build1 (THROW_EXPR, void_type_node, exp);
  SET_EXPR_LOCATION (exp, input_location);

  return exp;
}
Пример #17
0
/* Process the thunk THUNK_FNDECL: mark its target function as used and,
   when EMIT_P, turn the thunk into a real function definition (cloning
   the target's argument list, matching its linkage/visibility, and
   registering it with the callgraph).  Recurses when the target is
   itself a thunk.  */
void
use_thunk (tree thunk_fndecl, bool emit_p)
{
  tree a, t, function, alias;
  tree virtual_offset;
  HOST_WIDE_INT fixed_offset, virtual_value;
  bool this_adjusting = DECL_THIS_THUNK_P (thunk_fndecl);

  /* We should have called finish_thunk to give it a name.  */
  gcc_assert (DECL_NAME (thunk_fndecl));

  /* We should never be using an alias, always refer to the
     aliased thunk.  */
  gcc_assert (!THUNK_ALIAS (thunk_fndecl));

  if (TREE_ASM_WRITTEN (thunk_fndecl))
    return;

  function = THUNK_TARGET (thunk_fndecl);
  if (DECL_RESULT (thunk_fndecl))
    /* We already turned this thunk into an ordinary function.
       There's no need to process this thunk again.  */
    return;

  if (DECL_THUNK_P (function))
    /* The target is itself a thunk, process it now.  */
    use_thunk (function, emit_p);

  /* Thunks are always addressable; they only appear in vtables.  */
  TREE_ADDRESSABLE (thunk_fndecl) = 1;

  /* Figure out what function is being thunked to.  It's referenced in
     this translation unit.  */
  TREE_ADDRESSABLE (function) = 1;
  mark_used (function);
  if (!emit_p)
    return;

  /* NOTE(review): the `0 &&` disables the local-alias path entirely;
     looks deliberate (common way to park code), but confirm.  */
  if (0 && TARGET_USE_LOCAL_THUNK_ALIAS_P (function))
   alias = make_alias_for_thunk (function);
  else
   alias = function;

  fixed_offset = THUNK_FIXED_OFFSET (thunk_fndecl);
  virtual_offset = THUNK_VIRTUAL_OFFSET (thunk_fndecl);

  if (virtual_offset)
    {
      if (!this_adjusting)
	virtual_offset = BINFO_VPTR_FIELD (virtual_offset);
      virtual_value = tree_low_cst (virtual_offset, /*pos=*/0);
      gcc_assert (virtual_value);
    }
  else
    virtual_value = 0;

  /* And, if we need to emit the thunk, it's used.  */
  mark_used (thunk_fndecl);
  /* This thunk is actually defined.  */
  DECL_EXTERNAL (thunk_fndecl) = 0;
  /* The linkage of the function may have changed.  FIXME in linkage
     rewrite.  */
  TREE_PUBLIC (thunk_fndecl) = TREE_PUBLIC (function);
  DECL_VISIBILITY (thunk_fndecl) = DECL_VISIBILITY (function);
  DECL_VISIBILITY_SPECIFIED (thunk_fndecl)
    = DECL_VISIBILITY_SPECIFIED (function);
  if (DECL_ONE_ONLY (function) || DECL_WEAK (function))
    make_decl_one_only (thunk_fndecl, cxx_comdat_group (thunk_fndecl));

  if (flag_syntax_only)
    {
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
      return;
    }

  push_to_top_level ();

  if (TARGET_USE_LOCAL_THUNK_ALIAS_P (function)
      && targetm.have_named_sections)
    {
      resolve_unique_section (function, 0, flag_function_sections);

      if (DECL_SECTION_NAME (function) != NULL && DECL_ONE_ONLY (function))
	{
	  resolve_unique_section (thunk_fndecl, 0, flag_function_sections);

	  /* Output the thunk into the same section as function.  */
	  DECL_SECTION_NAME (thunk_fndecl) = DECL_SECTION_NAME (function);
	}
    }

  /* Set up cloned argument trees for the thunk.  */
  t = NULL_TREE;
  for (a = DECL_ARGUMENTS (function); a; a = TREE_CHAIN (a))
    {
      tree x = copy_node (a);
      TREE_CHAIN (x) = t;
      DECL_CONTEXT (x) = thunk_fndecl;
      SET_DECL_RTL (x, NULL_RTX);
      DECL_HAS_VALUE_EXPR_P (x) = 0;
      t = x;
    }
  a = nreverse (t);
  DECL_ARGUMENTS (thunk_fndecl) = a;
  TREE_ASM_WRITTEN (thunk_fndecl) = 1;
  cgraph_add_thunk (thunk_fndecl, function,
		    this_adjusting, fixed_offset, virtual_value,
		    virtual_offset, alias);

  if (!this_adjusting
      || !targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
					       virtual_value, alias))
    {
      /* If this is a covariant thunk, or we don't have the necessary
	 code for efficient thunks, generate a thunk function that
	 just makes a call to the real function.  Unfortunately, this
	 doesn't work for varargs.  */

      if (varargs_function_p (function))
	error ("generic thunk code fails for method %q#D which uses %<...%>",
	       function);
    }

  pop_from_top_level ();
}
Пример #18
0
/* Begin a catch block for DECL (the handler parameter, or NULL_TREE for
   a catch-all).  Registers the __cxa_end_catch cleanup, calls
   __cxa_begin_catch, and initializes DECL from the caught exception.
   Returns the prepared handler type (NULL_TREE for catch-all).  */
tree
expand_start_catch_block (tree decl)
{
  tree exp;
  tree type, init;

  if (! doing_eh ())
    return NULL_TREE;

  if (decl)
    {
      if (!is_admissible_throw_operand_or_catch_parameter (decl, false))
	decl = error_mark_node;

      type = prepare_eh_type (TREE_TYPE (decl));
      mark_used (eh_type_info (type));
    }
  else
    type = NULL_TREE;

  /* Call __cxa_end_catch at the end of processing the exception.  */
  push_eh_cleanup (type);

  init = do_begin_catch ();

  /* If there's no decl at all, then all we need to do is make sure
     to tell the runtime that we've begun handling the exception.  */
  if (decl == NULL || decl == error_mark_node || init == error_mark_node)
    finish_expr_stmt (init);

  /* If the C++ object needs constructing, we need to do that before
     calling __cxa_begin_catch, so that std::uncaught_exception gets
     the right value during the copy constructor.  */
  else if (flag_use_cxa_get_exception_ptr
	   && TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
    {
      exp = do_get_exception_ptr ();
      initialize_handler_parm (decl, exp);
      finish_expr_stmt (init);
    }

  /* Otherwise the type uses a bitwise copy, and we don't have to worry
     about the value of std::uncaught_exception and therefore can do the
     copy with the return value of __cxa_end_catch instead.  */
  else
    {
      tree init_type = type;

      /* Pointers are passed by values, everything else by reference.  */
      if (!TYPE_PTR_P (type))
	init_type = build_pointer_type (type);
      if (init_type != TREE_TYPE (init))
	init = build1 (NOP_EXPR, init_type, init);
      exp = create_temporary_var (init_type);
      cp_finish_decl (exp, init, /*init_const_expr=*/false,
		      NULL_TREE, LOOKUP_ONLYCONVERTING);
      DECL_REGISTER (exp) = 1;
      initialize_handler_parm (decl, exp);
    }

  return type;
}
Пример #19
0
/* Mark the rectangle (x0,y0,w0,h0) as used in FS's row/column usage
   buffers, removing it from free space.
   NOTE(review): this updates the nat_* buffers whereas
   freespace_block_remove updates the blk_* buffers -- confirm the
   asymmetry is intended.  */
void freespace_remove(freespace* fs, int x0, int y0, int w0, int h0)
{
    mark_used(  fs->row_buf,        fs->col_buf,
                fs->nat_row_buf,    fs->nat_col_buf,
                x0,     y0,     w0,     h0);
}
Пример #20
0
/* Build the initializer list for the part of a typeinfo object common
   to all typeinfo kinds: the vtable pointer for the DESC pseudo-class
   and the mangled-name NTBS for TARGET.  Emits the NTBS variable as a
   side effect and caches the vtable pointer in DESC.  */
static tree
tinfo_base_init (tree desc, tree target)
{
  tree init = NULL_TREE;
  tree name_decl;
  tree vtable_ptr;
  
  {
    tree name_name;
    
    /* Generate the NTBS array variable.  */
    tree name_type = build_cplus_array_type
                     (build_qualified_type (char_type_node, TYPE_QUAL_CONST),
                     NULL_TREE);
    tree name_string = tinfo_name (target);

    /* Determine the name of the variable -- and remember with which
       type it is associated.  */
    name_name = mangle_typeinfo_string_for_type (target);
    TREE_TYPE (name_name) = target;

    name_decl = build_lang_decl (VAR_DECL, name_name, name_type);
    SET_DECL_ASSEMBLER_NAME (name_decl, name_name);
    DECL_ARTIFICIAL (name_decl) = 1;
    DECL_IGNORED_P (name_decl) = 1;
    TREE_READONLY (name_decl) = 1;
    TREE_STATIC (name_decl) = 1;
    DECL_EXTERNAL (name_decl) = 0;
    DECL_TINFO_P (name_decl) = 1;
    /* Names for incomplete types get internal linkage (cf.
       emit_tinfo_decl).  */
    if (involves_incomplete_p (target))
      {
	TREE_PUBLIC (name_decl) = 0;
	DECL_INTERFACE_KNOWN (name_decl) = 1;
      }
    else
      set_linkage_according_to_type (target, name_decl);
    import_export_decl (name_decl);
    DECL_INITIAL (name_decl) = name_string;
    mark_used (name_decl);
    pushdecl_top_level_and_finish (name_decl, name_string);
  }

  vtable_ptr = TINFO_VTABLE_DECL (desc);
  if (!vtable_ptr)
    {
      tree real_type;
  
      push_nested_namespace (abi_node);
      real_type = xref_tag (class_type, TINFO_REAL_NAME (desc),
			    /* APPLE LOCAL 4184203 */
			    /*tag_scope=*/ts_global, false);
      pop_nested_namespace (abi_node);
  
      if (!COMPLETE_TYPE_P (real_type))
	{
          /* We never saw a definition of this type, so we need to
	     tell the compiler that this is an exported class, as
	     indeed all of the __*_type_info classes are.  */
	  SET_CLASSTYPE_INTERFACE_KNOWN (real_type);
	  CLASSTYPE_INTERFACE_ONLY (real_type) = 1;
	}

      vtable_ptr = get_vtable_decl (real_type, /*complete=*/1);
      vtable_ptr = build_unary_op (ADDR_EXPR, vtable_ptr, 0);

      /* We need to point into the middle of the vtable.  */
      vtable_ptr = build2
	(PLUS_EXPR, TREE_TYPE (vtable_ptr), vtable_ptr,
	 size_binop (MULT_EXPR,
		     size_int (2 * TARGET_VTABLE_DATA_ENTRY_DISTANCE),
		     TYPE_SIZE_UNIT (vtable_entry_type)));

      /* Cache the computed pointer for subsequent typeinfo objects of
	 the same kind.  */
      TINFO_VTABLE_DECL (desc) = vtable_ptr;
    }

  init = tree_cons (NULL_TREE, vtable_ptr, init);
  
  init = tree_cons (NULL_TREE, decay_conversion (name_decl), init);
  
  init = build_constructor (NULL_TREE, nreverse (init));
  TREE_CONSTANT (init) = 1;
  TREE_INVARIANT (init) = 1;
  TREE_STATIC (init) = 1;
  init = tree_cons (NULL_TREE, init, NULL_TREE);
  
  return init;
}
Пример #21
0
/* Build the closure object for LAMBDA_EXPR by aggregate-initializing
   one field per capture.  Inside a template the LAMBDA_EXPR is returned
   unchanged.  Returns the finished compound literal, or error_mark_node
   when a capture was erroneous.  */
tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast  */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;
  location_t saved_loc;

  if (processing_template_decl)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  saved_loc = input_location;
  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);

  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
	{
	  expr = error_mark_node;
	  goto out;
	}

      if (DECL_P (val))
	mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
	 do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
	       && !DECL_VLA_CAPTURE_P (field)
	       && TREE_CODE (TREE_TYPE (field)) != REFERENCE_TYPE)
	{
	  /* "the entities that are captured by copy are used to
	     direct-initialize each corresponding non-static data
	     member of the resulting closure object."

	     There's normally no way to express direct-initialization
	     from an element of a CONSTRUCTOR, so we build up a special
	     TARGET_EXPR to bypass the usual copy-initialization.  */
	  val = force_rvalue (val, tf_warning_or_error);
	  if (TREE_CODE (val) == TARGET_EXPR)
	    TARGET_EXPR_DIRECT_INIT_P (val) = true;
	}

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  input_location = saved_loc;
  return expr;
}
Пример #22
0
/* Implement the semantics of dynamic_cast<TYPE>(EXPR).  TYPE must be a
   pointer or reference to a complete class type, or `pointer to cv
   void'.  Casts that are statically resolvable (upcasts to an
   unambiguous accessible base) are folded at compile time; everything
   else is lowered to a call to the runtime helper __dynamic_cast.
   On an invalid cast the problem is reported via ERRSTR and
   error_mark_node is returned.  */
static tree
build_dynamic_cast_1 (tree type, tree expr)
{
  enum tree_code tc = TREE_CODE (type);
  tree exprtype = TREE_TYPE (expr);
  tree dcast_fn;
  tree old_expr = expr;	/* Original operand, kept for diagnostics.  */
  const char *errstr = NULL;

  /* T shall be a pointer or reference to a complete class type, or
     `pointer to cv void''.  */
  switch (tc)
    {
    case POINTER_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == VOID_TYPE)
	break;
      /* Fall through.  */
    case REFERENCE_TYPE:
      if (! IS_AGGR_TYPE (TREE_TYPE (type)))
	{
	  errstr = "target is not pointer or reference to class";
	  goto fail;
	}
      if (!COMPLETE_TYPE_P (complete_type (TREE_TYPE (type))))
	{
	  errstr = "target is not pointer or reference to complete type";
	  goto fail;
	}
      break;

    default:
      errstr = "target is not pointer or reference";
      goto fail;
    }

  if (tc == POINTER_TYPE)
    {
      /* If T is a pointer type, v shall be an rvalue of a pointer to
	 complete class type, and the result is an rvalue of type T.  */

      if (TREE_CODE (exprtype) != POINTER_TYPE)
	{
	  errstr = "source is not a pointer";
	  goto fail;
	}
      if (! IS_AGGR_TYPE (TREE_TYPE (exprtype)))
	{
	  errstr = "source is not a pointer to class";
	  goto fail;
	}
      if (!COMPLETE_TYPE_P (complete_type (TREE_TYPE (exprtype))))
	{
	  errstr = "source is a pointer to incomplete type";
	  goto fail;
	}
    }
  else
    {
      /* Apply trivial conversion T -> T& for dereferenced ptrs.  */
      exprtype = build_reference_type (exprtype);
      expr = convert_to_reference (exprtype, expr, CONV_IMPLICIT,
				   LOOKUP_NORMAL, NULL_TREE);

      /* T is a reference type, v shall be an lvalue of a complete class
	 type, and the result is an lvalue of the type referred to by T.  */

      if (! IS_AGGR_TYPE (TREE_TYPE (exprtype)))
	{
	  errstr = "source is not of class type";
	  goto fail;
	}
      if (!COMPLETE_TYPE_P (complete_type (TREE_TYPE (exprtype))))
	{
	  errstr = "source is of incomplete class type";
	  goto fail;
	}
      
    }

  /* The dynamic_cast operator shall not cast away constness.  */
  if (!at_least_as_qualified_p (TREE_TYPE (type),
				TREE_TYPE (exprtype)))
    {
      errstr = "conversion casts away constness";
      goto fail;
    }

  /* If *type is an unambiguous accessible base class of *exprtype,
     convert statically.  */
  {
    tree binfo;

    binfo = lookup_base (TREE_TYPE (exprtype), TREE_TYPE (type),
			 ba_check, NULL);

    if (binfo)
      {
	expr = build_base_path (PLUS_EXPR, convert_from_reference (expr),
				binfo, 0);
	if (TREE_CODE (exprtype) == POINTER_TYPE)
	  expr = non_lvalue (expr);
	return expr;
      }
  }

  /* Otherwise *exprtype must be a polymorphic class (have a vtbl).  */
  if (TYPE_POLYMORPHIC_P (TREE_TYPE (exprtype)))
    {
      tree expr1;
      /* if TYPE is `void *', return pointer to complete object.  */
      if (tc == POINTER_TYPE && VOID_TYPE_P (TREE_TYPE (type)))
	{
	  /* if b is an object, dynamic_cast<void *>(&b) == (void *)&b.  */
	  if (TREE_CODE (expr) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (expr, 0)) == VAR_DECL
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE)
	    return build1 (NOP_EXPR, type, expr);

	  /* Since expr is used twice below, save it.  */
	  expr = save_expr (expr);

	  /* build_headof locates the start of the complete object;
	     guard against a null source pointer with ifnonnull.  */
	  expr1 = build_headof (expr);
	  if (TREE_TYPE (expr1) != type)
	    expr1 = build1 (NOP_EXPR, type, expr1);
	  return ifnonnull (expr, expr1);
	}
      else
	{
	  tree retval;
          tree result, td2, td3, elems;
          tree static_type, target_type, boff;

 	  /* If we got here, we can't convert statically.  Therefore,
	     dynamic_cast<D&>(b) (b an object) cannot succeed.  */
	  if (tc == REFERENCE_TYPE)
	    {
	      if (TREE_CODE (old_expr) == VAR_DECL
		  && TREE_CODE (TREE_TYPE (old_expr)) == RECORD_TYPE)
		{
	          tree expr = throw_bad_cast ();
		  warning ("dynamic_cast of %q#D to %q#T can never succeed",
                           old_expr, type);
	          /* Bash it to the expected type.  */
	          TREE_TYPE (expr) = type;
		  return expr;
		}
	    }
	  /* Ditto for dynamic_cast<D*>(&b).  */
	  else if (TREE_CODE (expr) == ADDR_EXPR)
	    {
	      tree op = TREE_OPERAND (expr, 0);
	      if (TREE_CODE (op) == VAR_DECL
		  && TREE_CODE (TREE_TYPE (op)) == RECORD_TYPE)
		{
		  warning ("dynamic_cast of %q#D to %q#T can never succeed",
                           op, type);
		  /* A pointer cast that can never succeed just yields a
		     null pointer of the target type.  */
		  retval = build_int_cst (type, 0); 
		  return retval;
		}
	    }

	  /* Build the type_info addresses for source and target.  */
	  target_type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
	  static_type = TYPE_MAIN_VARIANT (TREE_TYPE (exprtype));
	  td2 = get_tinfo_decl (target_type);
	  mark_used (td2);
	  td2 = build_unary_op (ADDR_EXPR, td2, 0);
	  td3 = get_tinfo_decl (static_type);
	  mark_used (td3);
	  td3 = build_unary_op (ADDR_EXPR, td3, 0);

          /* Determine how T and V are related.  */
          boff = dcast_base_hint (static_type, target_type);
          
	  /* Since expr is used twice below, save it.  */
	  expr = save_expr (expr);

	  expr1 = expr;
	  if (tc == REFERENCE_TYPE)
	    expr1 = build_unary_op (ADDR_EXPR, expr1, 0);

	  /* Argument list for __dynamic_cast:
	     (sub, src_type_info, dst_type_info, src2dst_offset).  */
	  elems = tree_cons
	    (NULL_TREE, expr1, tree_cons
	     (NULL_TREE, td3, tree_cons
	      (NULL_TREE, td2, tree_cons
	       (NULL_TREE, boff, NULL_TREE))));

	  /* Lazily build and cache the declaration of the runtime
	     helper __dynamic_cast the first time it is needed.  */
	  dcast_fn = dynamic_cast_node;
	  if (!dcast_fn)
	    {
	      tree tmp;
	      tree tinfo_ptr;
	      tree ns = abi_node;
	      const char *name;
	      
	      push_nested_namespace (ns);
	      tinfo_ptr = xref_tag (class_type,
				    get_identifier ("__class_type_info"),
				    /* APPLE LOCAL 4184203 */
				    /*tag_scope=*/ts_global, false);
	      
	      tinfo_ptr = build_pointer_type
		(build_qualified_type
		 (tinfo_ptr, TYPE_QUAL_CONST));
	      name = "__dynamic_cast";
	      /* void *__dynamic_cast (const void *, const
		 __class_type_info *, const __class_type_info *,
		 ptrdiff_t).  */
	      tmp = tree_cons
		(NULL_TREE, const_ptr_type_node, tree_cons
		 (NULL_TREE, tinfo_ptr, tree_cons
		  (NULL_TREE, tinfo_ptr, tree_cons
		   (NULL_TREE, ptrdiff_type_node, void_list_node))));
	      tmp = build_function_type (ptr_type_node, tmp);
	      dcast_fn = build_library_fn_ptr (name, tmp);
	      DECL_IS_PURE (dcast_fn) = 1;
              pop_nested_namespace (ns);
              dynamic_cast_node = dcast_fn;
	    }
          result = build_cxx_call (dcast_fn, elems);

	  if (tc == REFERENCE_TYPE)
	    {
	      /* A failed reference cast throws std::bad_cast instead of
		 yielding null: result ? result : throw.  */
	      tree bad = throw_bad_cast ();
	      
	      result = save_expr (result);
	      return build3 (COND_EXPR, type, result, result, bad);
	    }

	  /* Now back to the type we want from a void*.  */
	  result = cp_convert (type, result);
          return ifnonnull (expr, result);
	}
    }
  else
    errstr = "source type is not polymorphic";

 fail:
  error ("cannot dynamic_cast %qE (of type %q#T) to type %q#T (%s)",
         expr, exprtype, type, errstr);
  return error_mark_node;
}
Пример #23
0
/* MAX_SLOPE
 * ---------
 * Walk the channels and close a group whenever the absolute slope
 * between the group's first channel and the current channel reaches
 * `slope`, or the group's extent along the independent axis reaches
 * `maxlength`.  Channels that never complete a group are swept into
 * "bad" (incomplete) groups afterwards.
 *
 * Input parameters:
 *   dataCol   - the column representing the independent axis
 *   binCol    - the column with the data values
 *   numChans  - number of channels in groupCol and qualCol
 *   slope     - the maximum slope threshold
 *   groupCol  - the GROUPING column
 *   qualCol   - the QUALITY column
 *   tabStops  - array giving channels with tabs or stops
 *   maxlength - maximum size of groups (<= 0 selects MAX_BIN_LENGTH)
 *
 * Returns GRP_SUCCESS, or GRP_ERROR on invalid input or on allocation
 * failure.
 */
int grp_do_max_slope(double *dataCol, double *binCol, long numChans,
                     double slope, short *groupCol, short *qualCol,
                     short *tabStops, double maxlength,
                     dsErrList *errList){
    
    long ii, jj, counter = 0;
    double range = 0.0;
    double tempSlope = 0.0;
    short *usedChans;
    
    /* Check for obviously bad inputs */
    if(!dataCol || !binCol || (numChans < 2) || (slope <= 0)
       || !groupCol || !qualCol || !tabStops){
        if(errList)
            dsErrAdd(errList, dsDMGROUPBADPARAMERR, Accumulation,
                     Generic);
        else
            err_msg("ERROR: At least one input parameter has an "
                    "invalid value.\n");
        return(GRP_ERROR);
    }
    
    if(maxlength <= 0.0)
        maxlength = MAX_BIN_LENGTH;
    
    /* Create and initialize used channel list.
     * A channel is pre-marked "used" if it sits in a tab/stop or
     * already has non-zero quality. */
    usedChans = (short *) calloc(numChans, sizeof(short));
    if(!usedChans){
        /* Fix: the allocation was previously unchecked; a NULL return
         * would have been dereferenced below. */
        err_msg("ERROR: memory allocation failed.\n");
        return(GRP_ERROR);
    }
    for(ii = 0; ii < numChans; ii++){
        if(tabStops[ii] || (qualCol[ii] != 0))
            usedChans[ii] = GRP_TRUE;
        else
            usedChans[ii] = GRP_FALSE;
    }
    
    ii = 0;
    jj = 1;
    while(ii < (numChans - 1)){
        
        /* Are we in a tab or stop? */
        if(tabStops[ii]){
            ii++;
            jj = (ii + 1);
        }
        else{
            while(jj < numChans){
                
                /* Slope between the group's first and current channel.
                 * NOTE(review): assumes dataCol is strictly monotonic;
                 * equal abscissae would divide by zero - confirm with
                 * callers. */
                tempSlope = fabs(((binCol[jj] - binCol[ii])
                                  / (dataCol[jj] - dataCol[ii])));
                
                range = (dataCol[jj] - dataCol[ii]);
                
                /* Are we in a tab or stop? */
                if(tabStops[jj]){
                    ii++;
                    jj = (ii + 1);
                    break;
                }
                /* Are we at the end of the table? */
                else if(jj == (numChans - 1)){
                    /* Does this complete a group? */
                    if((tempSlope >= slope) ||
                       (range >= maxlength)){
                        mark_used(usedChans, ii, jj);
                        create_group(groupCol, ii, jj);
                        set_quality(qualCol, GRP_GOOD, ii, jj);
                        ii = jj;
                        break;
                    }
                    else{
                        ii++;
                        jj = (ii + 1);
                        break;
                    }
                }
                /* Are we at the end of a group or have reached
                 * maxlength? */
                else if((tempSlope >= slope) ||
                        (range >= maxlength)){
                    mark_used(usedChans, ii, jj);
                    create_group(groupCol, ii, jj);
                    set_quality(qualCol, GRP_GOOD, ii, jj);
                    ii = (jj + 1);
                    jj = (ii + 1);
                    break;
                }
                /* Keep looking */
                else jj++;
                
            } /* end while(jj) */
        } /* end if */
    } /* end while(ii) */
    
    /* Put unused channels into "bad" groups */
    for(ii = 0; ii < numChans; ii++){
        /* Are we in a used channel? */
        if(usedChans[ii]){
            if(counter != 0){
                set_incomplete(groupCol, qualCol, ii - counter,
                               ii - 1);
                counter = 0;
            }
        }
        /* Are we at the end of the table? */
        else if(ii == (numChans - 1)){
            /* Does this complete a group? */
            if(counter != 0)
                set_incomplete(groupCol, qualCol, ii - counter, ii);
            else
                set_incomplete(groupCol, qualCol, ii, ii);
        }
        /* Are we at the end of a group */
        else if(usedChans[ii + 1]){
            set_incomplete(groupCol, qualCol, ii - counter, ii);
            counter = 0;
        }
        /* Are we at the beginning of a group? */
        else{
            counter++;
        }
    } /* end for(ii) */
    
    free(usedChans);
    return(GRP_SUCCESS);
}
Пример #24
0
/* ADAPTIVE
 * --------
 * Repeatedly sweep the table with growing group widths (1..maxLength),
 * grouping any run of unused channels whose summed counts reach
 * minCounts.  Channels never grouped are swept into "bad" (incomplete)
 * groups afterwards.
 *
 * Input parameters:
 *   dataCol   - the column with the data
 *   numChans  - number of channels in groupCol and qualCol
 *   minCounts - the minimum number of counts desired in each group
 *   groupCol  - the GROUPING column
 *   qualCol   - the QUALITY column
 *   tabStops  - array giving channels with tabs or stops
 *   maxLength - maximum size of groups (<= 0 selects MAX_BIN_LENGTH)
 *
 * Returns GRP_SUCCESS, or GRP_ERROR on invalid input or on allocation
 * failure.
 */
int grp_do_adaptive(double *dataCol, long numChans, double minCounts,
                    short *groupCol, short *qualCol, short *tabStops,
                    double maxLength, dsErrList *errList){
    
    short *usedChans;
    long ii, jj, tempLength, tempMax, curWidth = 0;
    long counter = 0;
    double groupCounts = 0.0;

    /* Check for obviously bad inputs */
    if(!dataCol || (numChans <= 0) || (minCounts <= 0) || !groupCol
       || !qualCol || !tabStops){
        if(errList)
            dsErrAdd(errList, dsDMGROUPBADPARAMERR, Accumulation,
                     Generic);
        else
            err_msg("ERROR: At least one input parameter has an "
                    "invalid value.\n");
        return(GRP_ERROR);
    }
    
    if(maxLength <= 0.0)
        maxLength = MAX_BIN_LENGTH;
   
    /* Create and initialize used channel list.
     * A channel is pre-marked "used" if it sits in a tab/stop or
     * already has non-zero quality. */
    usedChans = (short *) calloc(numChans, sizeof(short));
    if(!usedChans){
        /* Fix: the allocation was previously unchecked; a NULL return
         * would have been dereferenced below. */
        err_msg("ERROR: memory allocation failed.\n");
        return(GRP_ERROR);
    }
    for(ii = 0; ii < numChans; ii++){
        if(tabStops[ii] || (qualCol[ii] != 0))
            usedChans[ii] = GRP_TRUE;
        else
            usedChans[ii] = GRP_FALSE;
    }
    
    /* Main loop through adaptive group sizes */
    while((curWidth + 1) <= maxLength){
        curWidth++;
        
        /* Determine maxLength each time as it might be limited:
         * shrink it to the longest remaining run of unused channels. */
        tempLength = 0;
        tempMax = 0;
        for(ii = 0; ii < numChans; ii++){
            if(!usedChans[ii]){
                tempLength++;
                if(tempLength > tempMax)
                    tempMax = tempLength;
            }
            else
                tempLength = 0;
        }
        if(tempMax < maxLength)
            maxLength = tempMax;
        
        /* Iterate over each row for each group size.
         * NOTE(review): the bound `ii < (numChans - curWidth)` never
         * tries a group starting at exactly numChans - curWidth (i.e.
         * one ending on the last channel); confirm whether that is
         * intentional. */
        for(ii = 0; ii < (numChans - curWidth); ii++){
            if(usedChans[ii]) continue;
            groupCounts = 0.0;
            /* Try to make groups of the current width */
            for(jj = 0; jj < curWidth; jj++){
                if(usedChans[ii + jj])
                    break;
                groupCounts += dataCol[ii + jj];
                if(jj == curWidth - 1){
                    if(groupCounts >= minCounts){
                        /* Enough counts - let's group it */
                        mark_used(usedChans, ii, ii + jj);
                        create_group(groupCol, ii, ii + jj);
                        set_quality(qualCol, GRP_GOOD, ii, ii + jj);
                    }
                }
            } /* end for(jj) */
        } /* end for(ii) */
    } /* end while() */
    
    /* Put unused channels into "bad" groups */
    for(ii = 0; ii < numChans; ii++){
        /* Are we in a used channel? */
        if(usedChans[ii]){
            if(counter != 0){
                set_incomplete(groupCol, qualCol, ii - counter,
                               ii - 1);
                counter = 0;
            }
        }
        /* Are we at the end of the table? */
        else if(ii == (numChans - 1)){
            /* Does this complete a group? */
            if(counter != 0)
                set_incomplete(groupCol, qualCol, ii - counter, ii);
            else
                set_incomplete(groupCol, qualCol, ii, ii);
        }
        /* Are we at the end of a group */
        else if(usedChans[ii + 1]){
            set_incomplete(groupCol, qualCol, ii - counter, ii);
            counter = 0;
        }
        /* Are we at the beginning of a group? */
        else{
            counter++;
        }
    } /* end for(ii) */
    
    free(usedChans);
    return(GRP_SUCCESS);
}
Пример #25
0
/* ADAPTIVE_SNR
 * ------------
 * Like ADAPTIVE, but a candidate group of the current width is accepted
 * when its signal-to-noise ratio exceeds `snr`.  When useErr is true the
 * noise is accumulated from errorCol (sum of squared errors); otherwise
 * the counts themselves are used as the statistic.
 *
 * Input parameters:
 *   dataCol   - the column with the data
 *   numChans  - number of channels in groupCol and qualCol
 *   snr       - the signal-to-noise ratio threshold
 *   groupCol  - the GROUPING column
 *   qualCol   - the QUALITY column
 *   tabStops  - array giving channels with tabs or stops
 *   errorCol  - optional error column
 *   useErr    - if true, use errorCol data, else use counts
 *   maxLength - maximum size of groups (<= 0 selects MAX_BIN_LENGTH)
 *
 * Returns GRP_SUCCESS, GRP_WARNING if zero-valued errors were seen in
 * errorCol, or GRP_ERROR on invalid input or allocation failure.
 */
int grp_do_adaptive_snr(double *dataCol, long numChans, double snr,
                        short *groupCol, short *qualCol,
                        short *tabStops, double *errorCol,
                        short useErr, double maxLength,
                        dsErrList *errList){

   long ii, jj, tempLength, tempMax, counter = 0;
   long curWidth = 0;
   double runningSnr    = 0.0;
   double runningSignal = 0.0;
   double runningNoise  = 0.0;
   short *usedChans;
   int returnVal = GRP_SUCCESS;

    /* Check for obviously bad inputs */
    if(!dataCol || (numChans <= 0) || (snr <= 0) || !groupCol
       || !qualCol || !tabStops || !errorCol){
        if(errList)
            dsErrAdd(errList, dsDMGROUPBADPARAMERR, Accumulation,
                     Generic);
        else
            err_msg("ERROR: At least one input parameter has an "
                    "invalid value.\n");
        return(GRP_ERROR);
    }
    
    if(maxLength <= 0.0)
       maxLength = MAX_BIN_LENGTH;
   
   /* Create used channel list; channels in tabs/stops or with non-zero
    * quality are pre-marked used. */
   usedChans = (short *) calloc(numChans, sizeof(short));
   if(!usedChans){
       /* Fix: the allocation was previously unchecked; a NULL return
        * would have been dereferenced below. */
       err_msg("ERROR: memory allocation failed.\n");
       return(GRP_ERROR);
   }
   for(ii = 0; ii < numChans; ii++){
      if(tabStops[ii] || (qualCol[ii] != 0))
         usedChans[ii] = GRP_TRUE;
      else
         usedChans[ii] = GRP_FALSE;
   }
   /* Main loop through adaptive group sizes */
   while((curWidth + 1) <= maxLength){
      curWidth++;

      /* Determine maxLength each time as it might be limited:
       * shrink it to the longest remaining run of unused channels. */
      tempLength = 0;
      tempMax = 0;
      for(ii = 0; ii < numChans; ii++){
         if(!usedChans[ii]){
            tempLength++;
            if(tempLength > tempMax)
               tempMax = tempLength;
         }
         else
            tempLength = 0;
      }
      if(tempMax < maxLength)
         maxLength = tempMax;

      /* Iterate over each row for each group size */
      for(ii = 0; ii < (numChans - curWidth); ii++){
         if(usedChans[ii]) continue;
         runningSnr = 0.0;
	 runningSignal = 0.0;
	 runningNoise = 0.0;
         /* Try to make groups of the current width */
         for(jj = 0; jj < curWidth; jj++){
            if(usedChans[ii + jj]){
               runningSnr = 0.0;
	       runningSignal = 0.0;
	       runningNoise = 0.0;
               break;
            }
            if(useErr){
                if(!errorCol[ii + jj]){
                    if(errList)
                        dsErrAdd(errList, dsDMGROUPZEROERRORERR,
                                 Accumulation, Generic);
                    else
                        err_msg("WARNING: The supplied error column "
                                "contains zero-valued data.");
                    returnVal = GRP_WARNING;
                }
                else {
                    /* Removed a dead `runningSnr += pow(...)` store
                     * that was unconditionally overwritten below. */
		    runningSignal += dataCol[ii+jj];
		    runningNoise += (errorCol[ii+jj]*errorCol[ii+jj]);
		    runningSnr = runningSignal/runningNoise;
		    runningSnr *= runningSnr;

		}
            }
            else if(dataCol[ii + jj]) {
	      /* Fix: was `dataCol[ii]`, which re-added the group's
	       * first channel on every iteration instead of the
	       * current one. */
	      runningSignal += dataCol[ii + jj];
	      runningSnr = runningSignal;
	    }
            if(jj == (curWidth - 1)){
               if(sqrt(runningSnr) > snr){
                  /* Enough counts - let's group it */
                  mark_used(usedChans, ii, ii + jj);
                  create_group(groupCol, ii, ii + jj);
                  set_quality(qualCol, GRP_GOOD, ii, ii + jj);
               }
            }
         } /* end for(jj) */
      } /* end for(ii) */
   } /* end while() */

   /* Put unused channels into "bad" groups */
   for(ii = 0; ii < numChans; ii++){
      /* Are we in a used channel? */
      if(usedChans[ii]){
         if(counter != 0){
            set_incomplete(groupCol, qualCol, ii - counter, ii - 1);
            counter = 0;
         }
      }
      /* Are we at the end of the table? */
      else if(ii == (numChans - 1)){
         /* Does this complete a group? */
         if(counter != 0)
            set_incomplete(groupCol, qualCol, ii - counter, ii);
         else
            set_incomplete(groupCol, qualCol, ii, ii);
      }
      /* Are we at the end of a group */
      else if(usedChans[ii + 1]){
         set_incomplete(groupCol, qualCol, ii - counter, ii);
         counter = 0;
      }
      /* Are we at the beginning of a group? */
      else{
         counter++;
      }
   } /* end for(ii) */

   free(usedChans);
   /* Fix: previously returned GRP_SUCCESS unconditionally, losing the
    * GRP_WARNING recorded above for zero-valued error data. */
   return(returnVal);
}
Пример #26
0
/* Emit the definition of THUNK_FNDECL, a `this'-adjusting or covariant
   return thunk whose target is THUNK_TARGET (thunk_fndecl), when EMIT_P
   is true; when EMIT_P is false only mark the thunk and its target as
   used/referenced.  Where the target hook can produce an efficient
   asm-level thunk, that is used; otherwise a small C++ function that
   forwards the call (adjusting `this' and/or the result) is
   synthesized and compiled.  */
void
use_thunk (tree thunk_fndecl, bool emit_p)
{
    tree a, t, function, alias;
    tree virtual_offset;
    HOST_WIDE_INT fixed_offset, virtual_value;
    bool this_adjusting = DECL_THIS_THUNK_P (thunk_fndecl);

    /* We should have called finish_thunk to give it a name.  */
    gcc_assert (DECL_NAME (thunk_fndecl));

    /* We should never be using an alias, always refer to the
       aliased thunk.  */
    gcc_assert (!THUNK_ALIAS (thunk_fndecl));

    if (TREE_ASM_WRITTEN (thunk_fndecl))
        return;

    function = THUNK_TARGET (thunk_fndecl);
    if (DECL_RESULT (thunk_fndecl))
        /* We already turned this thunk into an ordinary function.
           There's no need to process this thunk again.  */
        return;

    if (DECL_THUNK_P (function))
        /* The target is itself a thunk, process it now.  */
        use_thunk (function, emit_p);

    /* Thunks are always addressable; they only appear in vtables.  */
    TREE_ADDRESSABLE (thunk_fndecl) = 1;

    /* Figure out what function is being thunked to.  It's referenced in
       this translation unit.  */
    TREE_ADDRESSABLE (function) = 1;
    mark_used (function);
    if (!emit_p)
        return;

    if (TARGET_USE_LOCAL_THUNK_ALIAS_P (function))
        alias = make_alias_for_thunk (function);
    else
        alias = function;

    fixed_offset = THUNK_FIXED_OFFSET (thunk_fndecl);
    virtual_offset = THUNK_VIRTUAL_OFFSET (thunk_fndecl);

    if (virtual_offset)
    {
        /* For a result-adjusting thunk the virtual offset is stored as
           a binfo; extract the vtable field it refers to.  */
        if (!this_adjusting)
            virtual_offset = BINFO_VPTR_FIELD (virtual_offset);
        virtual_value = tree_low_cst (virtual_offset, /*pos=*/0);
        gcc_assert (virtual_value);
    }
    else
        virtual_value = 0;

    /* And, if we need to emit the thunk, it's used.  */
    mark_used (thunk_fndecl);
    /* This thunk is actually defined.  */
    DECL_EXTERNAL (thunk_fndecl) = 0;
    /* The linkage of the function may have changed.  FIXME in linkage
       rewrite.  */
    TREE_PUBLIC (thunk_fndecl) = TREE_PUBLIC (function);
    DECL_VISIBILITY (thunk_fndecl) = DECL_VISIBILITY (function);
    DECL_VISIBILITY_SPECIFIED (thunk_fndecl)
        = DECL_VISIBILITY_SPECIFIED (function);
    if (DECL_ONE_ONLY (function))
        make_decl_one_only (thunk_fndecl);

    if (flag_syntax_only)
    {
        TREE_ASM_WRITTEN (thunk_fndecl) = 1;
        return;
    }

    push_to_top_level ();

    if (TARGET_USE_LOCAL_THUNK_ALIAS_P (function)
            && targetm.have_named_sections)
    {
        resolve_unique_section (function, 0, flag_function_sections);

        if (DECL_SECTION_NAME (function) != NULL && DECL_ONE_ONLY (function))
        {
            resolve_unique_section (thunk_fndecl, 0, flag_function_sections);

            /* Output the thunk into the same section as function.  */
            DECL_SECTION_NAME (thunk_fndecl) = DECL_SECTION_NAME (function);
        }
    }

    /* The back-end expects DECL_INITIAL to contain a BLOCK, so we
       create one.  */
    DECL_INITIAL (thunk_fndecl) = make_node (BLOCK);

    /* Set up cloned argument trees for the thunk.  */
    t = NULL_TREE;
    for (a = DECL_ARGUMENTS (function); a; a = TREE_CHAIN (a))
    {
        tree x = copy_node (a);
        TREE_CHAIN (x) = t;
        DECL_CONTEXT (x) = thunk_fndecl;
        SET_DECL_RTL (x, NULL_RTX);
        DECL_HAS_VALUE_EXPR_P (x) = 0;
        t = x;
    }
    a = nreverse (t);
    DECL_ARGUMENTS (thunk_fndecl) = a;
    BLOCK_VARS (DECL_INITIAL (thunk_fndecl)) = a;

    if (this_adjusting
            && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
                    virtual_value, alias))
    {
        /* Fast path: the target can emit the whole thunk directly as
           assembly, with no intermediate function body.  */
        const char *fnname;
        current_function_decl = thunk_fndecl;
        DECL_RESULT (thunk_fndecl)
            = build_decl (RESULT_DECL, 0, integer_type_node);
        fnname = XSTR (XEXP (DECL_RTL (thunk_fndecl), 0), 0);
        init_function_start (thunk_fndecl);
        current_function_is_thunk = 1;
        assemble_start_function (thunk_fndecl, fnname);

        targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
                                         fixed_offset, virtual_value, alias);

        assemble_end_function (thunk_fndecl, fnname);
        init_insn_lengths ();
        current_function_decl = 0;
        cfun = 0;
        TREE_ASM_WRITTEN (thunk_fndecl) = 1;
    }
    else
    {
        /* If this is a covariant thunk, or we don't have the necessary
        code for efficient thunks, generate a thunk function that
         just makes a call to the real function.  Unfortunately, this
         doesn't work for varargs.  */

        if (varargs_function_p (function))
            error ("generic thunk code fails for method %q#D which uses %<...%>",
                   function);

        DECL_RESULT (thunk_fndecl) = NULL_TREE;

        start_preparsed_function (thunk_fndecl, NULL_TREE, SF_PRE_PARSED);
        /* We don't bother with a body block for thunks.  */

        /* There's no need to check accessibility inside the thunk body.  */
        push_deferring_access_checks (dk_no_check);

        t = a;
        if (this_adjusting)
            t = thunk_adjust (t, /*this_adjusting=*/1,
                              fixed_offset, virtual_offset);

        /* Build up the call to the real function.  */
        t = tree_cons (NULL_TREE, t, NULL_TREE);
        for (a = TREE_CHAIN (a); a; a = TREE_CHAIN (a))
            t = tree_cons (NULL_TREE, a, t);
        t = nreverse (t);
        t = build_call (alias, t);
        CALL_FROM_THUNK_P (t) = 1;

        if (VOID_TYPE_P (TREE_TYPE (t)))
            finish_expr_stmt (t);
        else
        {
            if (!this_adjusting)
            {
                tree cond = NULL_TREE;

                if (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE)
                {
                    /* If the return type is a pointer, we need to
                       protect against NULL.  We know there will be an
                       adjustment, because that's why we're emitting a
                       thunk.  */
                    t = save_expr (t);
                    cond = cp_convert (boolean_type_node, t);
                }

                t = thunk_adjust (t, /*this_adjusting=*/0,
                                  fixed_offset, virtual_offset);
                if (cond)
                    t = build3 (COND_EXPR, TREE_TYPE (t), cond, t,
                                cp_convert (TREE_TYPE (t), integer_zero_node));
            }
            if (IS_AGGR_TYPE (TREE_TYPE (t)))
                t = build_cplus_new (TREE_TYPE (t), t);
            finish_return_stmt (t);
        }

        /* Since we want to emit the thunk, we explicitly mark its name as
        referenced.  */
        mark_decl_referenced (thunk_fndecl);

        /* But we don't want debugging information about it.  */
        DECL_IGNORED_P (thunk_fndecl) = 1;

        /* Re-enable access control.  */
        pop_deferring_access_checks ();

        thunk_fndecl = finish_function (0);
        tree_lowering_passes (thunk_fndecl);
        expand_body (thunk_fndecl);
    }

    pop_from_top_level ();
}
}