String
__go_int_array_to_string (const void* p, intgo len)
{
  const int32 *ints;
  intgo slen;
  intgo i;
  unsigned char *retdata;
  String ret;
  unsigned char *s;

  ints = (const int32 *) p;

  /* First pass: compute the length in bytes of the UTF-8 encoding,
     substituting the replacement character for invalid runes.  */
  slen = 0;
  for (i = 0; i < len; ++i)
    {
      int32 v;

      v = ints[i];

      if (v < 0 || v > 0x10ffff)
	v = 0xfffd;
      else if (0xd800 <= v && v <= 0xdfff)
	v = 0xfffd;

      if (v <= 0x7f)
	slen += 1;
      else if (v <= 0x7ff)
	slen += 2;
      else if (v <= 0xffff)
	slen += 3;
      else
	slen += 4;
    }

  retdata = runtime_mallocgc ((uintptr) slen, FlagNoPointers, 1, 0);
  ret.str = retdata;
  ret.len = slen;

  /* Second pass: encode each rune into the buffer just allocated.  */
  s = retdata;
  for (i = 0; i < len; ++i)
    {
      int32 v;

      v = ints[i];

      /* If V is out of range for UTF-8, substitute the replacement
	 character.  */
      if (v < 0 || v > 0x10ffff)
	v = 0xfffd;
      else if (0xd800 <= v && v <= 0xdfff)
	v = 0xfffd;

      if (v <= 0x7f)
	*s++ = v;
      else if (v <= 0x7ff)
	{
	  *s++ = 0xc0 | ((v >> 6) & 0x1f);
	  *s++ = 0x80 | (v & 0x3f);
	}
      else if (v <= 0xffff)
	{
	  *s++ = 0xe0 | ((v >> 12) & 0x0f);
	  *s++ = 0x80 | ((v >> 6) & 0x3f);
	  *s++ = 0x80 | (v & 0x3f);
	}
      else
	{
	  *s++ = 0xf0 | ((v >> 18) & 0x07);
	  *s++ = 0x80 | ((v >> 12) & 0x3f);
	  *s++ = 0x80 | ((v >> 6) & 0x3f);
	  *s++ = 0x80 | (v & 0x3f);
	}
    }

  return ret;
}
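Note: the examples collected on this page come from different vintages of the gccgo runtime, so runtime_mallocgc appears with two different signatures. As a rough sketch (parameter names here are illustrative, not copied from any particular libgo header), the older calls such as the one above take four arguments, while several later examples below pass a type descriptor instead of the dogc/zeroed flags:

/* Sketch of the two runtime_mallocgc prototypes assumed by these examples.  */

/* Older libgo: size, allocation flags, whether a collection may run, whether to zero.  */
void *runtime_mallocgc (uintptr size, uint32 flag, int32 dogc, int32 zeroed);

/* Newer libgo: size, type descriptor (possibly or'ed with a TypeInfo_* tag), flags.  */
void *runtime_mallocgc (uintptr size, uintptr typ, uint32 flag);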
Example #2
String
Signame (intgo sig)
{
  const char* s = NULL;
  char buf[100];
  size_t len;
  byte *data;
  String ret;

#if defined(HAVE_STRSIGNAL)
  s = strsignal (sig);
#endif

  if (s == NULL)
    {
      snprintf(buf, sizeof buf, "signal %ld", (long) sig);
      s = buf;
    }
  len = __builtin_strlen (s);
  data = runtime_mallocgc (len, FlagNoPointers, 0, 0);
  __builtin_memcpy (data, s, len);
  ret.str = data;
  ret.len = len;
  return ret;
}
Example #3
// Allocate a new g, with a stack big enough for stacksize bytes.
G*
runtime_malg(int32 stacksize, byte** ret_stack, size_t* ret_stacksize)
{
    G *newg;

    newg = runtime_malloc(sizeof(G));
    if(stacksize >= 0) {
#if USING_SPLIT_STACK
        int dont_block_signals = 0;

        *ret_stack = __splitstack_makecontext(stacksize,
                                              &newg->stack_context[0],
                                              ret_stacksize);
        __splitstack_block_signals_context(&newg->stack_context[0],
                                           &dont_block_signals, nil);
#else
        *ret_stack = runtime_mallocgc(stacksize, FlagNoProfiling|FlagNoGC, 0, 0);
        *ret_stacksize = stacksize;
        newg->gcinitial_sp = *ret_stack;
        newg->gcstack_size = stacksize;
        runtime_xadd(&runtime_stacks_sys, stacksize);
#endif
    }
    return newg;
}
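For context, here is a hypothetical call-site sketch; StackMin and the wrapper function are assumptions for illustration (in libgo the real caller is __go_go in proc.c), not code taken from this page:

/* Hypothetical caller: allocate a goroutine with a default-sized stack.  */
static G*
alloc_g_with_stack(void)
{
	byte *sp;
	size_t spsize;
	G *newg;

	newg = runtime_malg(StackMin, &sp, &spsize);
	/* The caller then records sp and spsize in the new goroutine's
	   scheduling context before it first runs.  */
	return newg;
}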
Example #4
static Hchan*
makechan(ChanType *t, int64 hint)
{
	Hchan *c;
	uintptr n;
	const Type *elem;

	elem = t->__element_type;

	// compiler checks this but be safe.
	if(elem->__size >= (1<<16))
		runtime_throw("makechan: invalid channel element type");

	if(hint < 0 || (intgo)hint != hint || (elem->__size > 0 && (uintptr)hint > (MaxMem - sizeof(*c)) / elem->__size))
		runtime_panicstring("makechan: size out of range");

	n = sizeof(*c);
	n = ROUND(n, elem->__align);

	// allocate memory in one call; the element queue follows the
	// alignment-rounded header size computed in n above.
	c = (Hchan*)runtime_mallocgc(n + hint*elem->__size, (uintptr)t | TypeInfo_Chan, 0);
	c->elemsize = elem->__size;
	c->elemtype = elem;
	c->dataqsiz = hint;

	if(debug)
		runtime_printf("makechan: chan=%p; elemsize=%D; dataqsiz=%D\n",
			c, (int64)elem->__size, (int64)c->dataqsiz);

	return c;
}
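The ROUND of the header size matters because the element queue is laid out immediately after the Hchan header in the same allocation. The helper below is a hypothetical sketch of that layout (the real runtime uses a chanbuf macro of roughly this shape; the name and signature here are assumptions):

/* Hypothetical: address of slot i in the channel's element queue, assuming the
   queue starts at the alignment-rounded header size used in the allocation above.  */
static void*
chan_buf_slot(Hchan *c, const Type *elem, uintptr i)
{
	uintptr hdr = ROUND(sizeof(*c), elem->__align);

	return (byte*)c + hdr + i*c->elemsize;
}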
Example #5
void *
__go_allocate_trampoline (uintptr_t size, void *closure)
{
  uintptr_t ptr_size;
  uintptr_t full_size;
  unsigned char *ret;

  /* Because the garbage collector only looks at aligned addresses, we
     need to store the closure at an aligned address to ensure that it
     sees it.  */
  ptr_size = sizeof (void *);
  full_size = (((size + ptr_size - 1) / ptr_size) * ptr_size);
  full_size += ptr_size;

  runtime_lock (&trampoline_lock);

  /* If the current page does not have enough room left, allocate a
     new one.  */
  if (full_size > trampoline_page_size - trampoline_page_used)
    trampoline_page = NULL;

  if (trampoline_page == NULL)
    {
      uintptr_t page_size;
      unsigned char *page;

      page_size = getpagesize ();
      __go_assert (page_size >= full_size);
      page = (unsigned char *) runtime_mallocgc (2 * page_size - 1, 0, 0, 0);
      page = (unsigned char *) (((uintptr_t) page + page_size - 1)
				& ~ (page_size - 1));

#ifdef HAVE_SYS_MMAN_H
      {
	int i;

	i = mprotect (page, page_size, PROT_READ | PROT_WRITE | PROT_EXEC);
	__go_assert (i == 0);
      }
#endif

      trampoline_page = page;
      trampoline_page_size = page_size;
      trampoline_page_used = 0;
    }

  ret = trampoline_page + trampoline_page_used;
  trampoline_page_used += full_size;

  runtime_unlock (&trampoline_lock);

  __builtin_memcpy (ret + full_size - ptr_size, &closure, ptr_size);

  return (void *) ret;
}
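Because the closure pointer is stored in the last word of the aligned block, reading it back later just reverses the same size arithmetic. The following is a hypothetical sketch mirroring the memcpy above (the function name is made up; it is not the runtime's actual accessor):

/* Hypothetical: recover the closure stored at the tail of a block returned by
   __go_allocate_trampoline for a trampoline body of SIZE bytes.  */
static void *
trampoline_closure_sketch (void *tramp, uintptr_t size)
{
  uintptr_t ptr_size = sizeof (void *);
  uintptr_t full_size = ((size + ptr_size - 1) / ptr_size) * ptr_size + ptr_size;
  void *closure;

  __builtin_memcpy (&closure, (unsigned char *) tramp + full_size - ptr_size, ptr_size);
  return closure;
}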
Example #6
void *
unsafe_NewArray (const struct __go_type_descriptor *descriptor, intgo n)
{
  uint64 size;
  void *ret;

  size = n * descriptor->__size;
  if (size == 0)
    ret = &runtime_zerobase;
  else if ((descriptor->__code & GO_NO_POINTERS) != 0)
    ret = runtime_mallocgc (size, FlagNoPointers, 1, 1);
  else
    {
      ret = runtime_mallocgc (size, 0, 1, 1);

      if (UseSpanType)
	runtime_settype (ret, (uintptr) descriptor | TypeInfo_Array);
    }

  return ret;
}
Example #7
struct __go_open_array
__go_string_to_byte_array (String str)
{
  unsigned char *data;
  struct __go_open_array ret;

  data = (unsigned char *) runtime_mallocgc (str.len, FlagNoPointers, 1, 0);
  __builtin_memcpy (data, str.str, str.len);
  ret.__values = (void *) data;
  ret.__count = str.len;
  ret.__capacity = str.len;
  return ret;
}
Example #8
void *
unsafe_New (const struct __go_type_descriptor *descriptor)
{
  uint32 flag;
  void *ret;

  flag = (descriptor->__code & GO_NO_POINTERS) != 0 ? FlagNoPointers : 0;
  ret = runtime_mallocgc (descriptor->__size, flag, 1, 1);

  if (UseSpanType && flag == 0)
    runtime_settype (ret, (uintptr) descriptor | TypeInfo_SingleObject);

  return ret;
}
Example #9
String
__go_byte_array_to_string (const void* p, intgo len)
{
    const unsigned char *bytes;
    unsigned char *retdata;
    String ret;

    bytes = (const unsigned char *) p;
    retdata = runtime_mallocgc ((uintptr) len, 0, FlagNoScan);
    __builtin_memcpy (retdata, bytes, len);
    ret.str = retdata;
    ret.len = len;
    return ret;
}
Example #10
struct __go_string
__go_byte_array_to_string (const void* p, size_t len)
{
  const unsigned char *bytes;
  unsigned char *retdata;
  struct __go_string ret;

  bytes = (const unsigned char *) p;
  retdata = runtime_mallocgc (len, RefNoPointers, 1, 0);
  __builtin_memcpy (retdata, bytes, len);
  ret.__data = retdata;
  ret.__length = len;
  return ret;
}
Example #11
void
__go_panic_msg (const char* msg)
{
  size_t len;
  unsigned char *sdata;
  struct __go_string s;
  struct __go_empty_interface arg;

  len = __builtin_strlen (msg);
  sdata = runtime_mallocgc (len, FlagNoPointers, 0, 0);
  __builtin_memcpy (sdata, msg, len);
  s.__data = sdata;
  s.__length = len;
  newErrorString(s, &arg);
  __go_panic (arg);
}
Example #12
struct __go_open_array
__go_string_to_byte_array (String str)
{
  uintptr cap;
  unsigned char *data;
  struct __go_open_array ret;

  cap = runtime_roundupsize (str.len);
  data = (unsigned char *) runtime_mallocgc (cap, 0, FlagNoScan | FlagNoZero);
  __builtin_memcpy (data, str.str, str.len);
  if (cap != (uintptr) str.len)
    __builtin_memset (data + str.len, 0, cap - (uintptr) str.len);
  ret.__values = (void *) data;
  ret.__count = str.len;
  ret.__capacity = (intgo) cap;
  return ret;
}
Example #13
struct __go_open_array
__go_string_to_int_array (String str)
{
  size_t c;
  const unsigned char *p;
  const unsigned char *pend;
  uintptr mem;
  uint32_t *data;
  uint32_t *pd;
  struct __go_open_array ret;

  c = 0;
  p = str.str;
  pend = p + str.len;
  while (p < pend)
    {
      int rune;

      ++c;
      p += __go_get_rune (p, pend - p, &rune);
    }

  if (c > MaxMem / sizeof (uint32_t))
    runtime_throw ("out of memory");

  mem = runtime_roundupsize (c * sizeof (uint32_t));
  data = (uint32_t *) runtime_mallocgc (mem, 0, FlagNoScan | FlagNoZero);
  p = str.str;
  pd = data;
  while (p < pend)
    {
      int rune;

      p += __go_get_rune (p, pend - p, &rune);
      *pd++ = rune;
    }
  if (mem > (uintptr) c * sizeof (uint32_t))
    __builtin_memset (data + c, 0, mem - (uintptr) c * sizeof (uint32_t));
  ret.__values = (void *) data;
  ret.__count = c;
  ret.__capacity = (intgo) (mem / sizeof (uint32_t));
  return ret;
}
Example #14
File: proc.c  Project: Sunmonds/gcc
G*
runtime_malg(int32 stacksize, byte** ret_stack, size_t* ret_stacksize)
{
	G *newg;

	newg = runtime_malloc(sizeof(G));
	if(stacksize >= 0) {
#if USING_SPLIT_STACK
		*ret_stack = __splitstack_makecontext(stacksize,
						      &newg->stack_context[0],
						      ret_stacksize);
#else
		*ret_stack = runtime_mallocgc(stacksize, FlagNoProfiling|FlagNoGC, 0, 0);
		*ret_stacksize = stacksize;
		newg->gcinitial_sp = *ret_stack;
		newg->gcstack_size = stacksize;
#endif
	}
	return newg;
}
Example #15
struct __go_string
__go_string_plus (struct __go_string s1, struct __go_string s2)
{
  int len;
  unsigned char *retdata;
  struct __go_string ret;

  if (s1.__length == 0)
    return s2;
  else if (s2.__length == 0)
    return s1;

  len = s1.__length + s2.__length;
  retdata = runtime_mallocgc (len, FlagNoPointers, 1, 0);
  __builtin_memcpy (retdata, s1.__data, s1.__length);
  __builtin_memcpy (retdata + s1.__length, s2.__data, s2.__length);
  ret.__data = retdata;
  ret.__length = len;
  return ret;
}
Example #16
String
__go_string_plus (String s1, String s2)
{
  int len;
  byte *retdata;
  String ret;

  if (s1.len == 0)
    return s2;
  else if (s2.len == 0)
    return s1;

  len = s1.len + s2.len;
  retdata = runtime_mallocgc (len, FlagNoPointers, 1, 0);
  __builtin_memcpy (retdata, s1.str, s1.len);
  __builtin_memcpy (retdata + s1.len, s2.str, s2.len);
  ret.str = retdata;
  ret.len = len;
  return ret;
}
Example #17
int32
runtime_netpollopen(uintptr fd, PollDesc *pd)
{
	byte b;

	runtime_lock(&selectlock);

	if((int)fd >= allocated) {
		int c;
		PollDesc **n;

		c = allocated;

		runtime_unlock(&selectlock);

		while((int)fd >= c)
			c *= 2;
		n = runtime_mallocgc(c * sizeof(PollDesc *), 0,
				     FlagNoScan|FlagNoProfiling|FlagNoInvokeGC);

		runtime_lock(&selectlock);

		if(c > allocated) {
			__builtin_memcpy(n, data, allocated * sizeof(PollDesc *));
			allocated = c;
			data = n;
		}
	}
	FD_SET(fd, &fds);
	data[fd] = pd;

	runtime_unlock(&selectlock);

	b = 0;
	write(wrwake, &b, sizeof b);

	return 0;
}
Example #18
struct __go_open_array
__go_make_slice2 (const struct __go_type_descriptor *td, uintptr_t len,
		  uintptr_t cap)
{
  const struct __go_slice_type* std;
  int ilen;
  int icap;
  uintptr_t size;
  struct __go_open_array ret;
  unsigned int flag;

  __go_assert (td->__code == GO_SLICE);
  std = (const struct __go_slice_type *) td;

  ilen = (int) len;
  if (ilen < 0 || (uintptr_t) ilen != len)
    runtime_panicstring ("makeslice: len out of range");

  icap = (int) cap;
  if (cap < len
      || (uintptr_t) icap != cap
      || (std->__element_type->__size > 0
	  && cap > (uintptr_t) -1U / std->__element_type->__size))
    runtime_panicstring ("makeslice: cap out of range");

  ret.__count = ilen;
  ret.__capacity = icap;

  size = cap * std->__element_type->__size;
  flag = ((std->__element_type->__code & GO_NO_POINTERS) != 0
	  ? FlagNoPointers
	  : 0);
  ret.__values = runtime_mallocgc (size, flag, 1, 1);

  return ret;
}
Example #19
// add finalizer; caller is responsible for making sure not already in table
void
runtime_addfinalizer(void *p, void (*f)(void*), const struct __go_func_type *ft)
{
	Fintab newtab;
	int32 i;
	byte *base;
	Finalizer *e;
	
	e = nil;
	if(f != nil) {
		e = runtime_mal(sizeof *e);
		e->fn = f;
		e->ft = ft;
	}

	if(!__sync_bool_compare_and_swap(&m->holds_finlock, 0, 1))
		runtime_throw("finalizer deadlock");

	runtime_lock(&finlock);
	if(!runtime_mlookup(p, &base, nil, nil) || p != base) {
		runtime_unlock(&finlock);
		__sync_bool_compare_and_swap(&m->holds_finlock, 1, 0);
		runtime_throw("addfinalizer on invalid pointer");
	}
	if(f == nil) {
		lookfintab(&fintab, p, 1);
		goto unlock;
	}

	if(lookfintab(&fintab, p, 0)) {
		runtime_unlock(&finlock);
		__sync_bool_compare_and_swap(&m->holds_finlock, 1, 0);
		runtime_throw("double finalizer");
	}
	runtime_setblockspecial(p);

	if(fintab.nkey >= fintab.max/2+fintab.max/4) {
		// keep table at most 3/4 full:
		// allocate new table and rehash.

		runtime_memclr((byte*)&newtab, sizeof newtab);
		newtab.max = fintab.max;
		if(newtab.max == 0)
			newtab.max = 3*3*3;
		else if(fintab.ndead < fintab.nkey/2) {
			// grow table if not many dead values.
			// otherwise just rehash into table of same size.
			newtab.max *= 3;
		}

		newtab.key = runtime_mallocgc(newtab.max*sizeof newtab.key[0], FlagNoPointers, 0, 1);
		newtab.val = runtime_mallocgc(newtab.max*sizeof newtab.val[0], 0, 0, 1);

		for(i=0; i<fintab.max; i++) {
			void *k;

			k = fintab.key[i];
			if(k != nil && k != (void*)-1)
				addfintab(&newtab, k, fintab.val[i]);
		}
		runtime_free(fintab.key);
		runtime_free(fintab.val);
		fintab = newtab;
	}

	addfintab(&fintab, p, e);
 unlock:
	runtime_unlock(&finlock);

	__sync_bool_compare_and_swap(&m->holds_finlock, 1, 0);

	if(__sync_bool_compare_and_swap(&m->gcing_for_finlock, 1, 0)) {
		__go_run_goroutine_gc(200);
	}
}
Example #20
void *
__go_new_nopointers (size_t size)
{
  return runtime_mallocgc (size, RefNoPointers, 1, 1);
}
Example #21
void *
__go_new (size_t size)
{
  return runtime_mallocgc (size, 0, 1, 1);
}
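These two entry points are the allocation routines behind expressions like new(T): the pointer-free variant for types without pointers, the general one otherwise. A hypothetical illustration of the calls a compiler might emit (the struct types are invented for the example):

/* Hypothetical illustration; the struct types are made up.  */
struct point { intgo x, y; };                   /* no pointers */
struct node { struct node *next; intgo v; };    /* contains a pointer */

static void
new_sketch (void)
{
  void *p1 = __go_new_nopointers (sizeof (struct point));  /* pointer-free path */
  void *p2 = __go_new (sizeof (struct node));              /* scanned path */

  (void) p1;
  (void) p2;
}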
Example #22
// add finalizer; caller is responsible for making sure not already in table
void
runtime_addfinalizer(void *p, void (*f)(void*), const struct __go_func_type *ft)
{
	Fintab newtab;
	int32 i;
	uint32 *ref;
	byte *base;
	Finalizer *e;
	
	e = nil;
	if(f != nil) {
		e = runtime_mal(sizeof *e);
		e->fn = f;
		e->ft = ft;
	}

	runtime_lock(&finlock);
	if(!runtime_mlookup(p, &base, nil, nil, &ref) || p != base) {
		runtime_unlock(&finlock);
		runtime_throw("addfinalizer on invalid pointer");
	}
	if(f == nil) {
		if(*ref & RefHasFinalizer) {
			lookfintab(&fintab, p, 1);
			*ref &= ~RefHasFinalizer;
		}
		runtime_unlock(&finlock);
		return;
	}

	if(*ref & RefHasFinalizer) {
		runtime_unlock(&finlock);
		runtime_throw("double finalizer");
	}
	*ref |= RefHasFinalizer;

	if(fintab.nkey >= fintab.max/2+fintab.max/4) {
		// keep table at most 3/4 full:
		// allocate new table and rehash.

		runtime_memclr((byte*)&newtab, sizeof newtab);
		newtab.max = fintab.max;
		if(newtab.max == 0)
			newtab.max = 3*3*3;
		else if(fintab.ndead < fintab.nkey/2) {
			// grow table if not many dead values.
			// otherwise just rehash into table of same size.
			newtab.max *= 3;
		}

		newtab.key = runtime_mallocgc(newtab.max*sizeof newtab.key[0], RefNoPointers, 0, 1);
		newtab.val = runtime_mallocgc(newtab.max*sizeof newtab.val[0], 0, 0, 1);

		for(i=0; i<fintab.max; i++) {
			void *k;

			k = fintab.key[i];
			if(k != nil && k != (void*)-1)
				addfintab(&newtab, k, fintab.val[i]);
		}
		runtime_free(fintab.key);
		runtime_free(fintab.val);
		fintab = newtab;
	}

	addfintab(&fintab, p, e);
	runtime_unlock(&finlock);
}