Example #1
int
__pthread_mutex_trylock (pthread_mutex_t *mtxp)
{
  struct __pthread *self;
  int ret;

  switch (MTX_TYPE (mtxp))
    {
    case PT_MTX_NORMAL:
      ret = lll_trylock (&mtxp->__lock);
      /* lll_trylock returns nonzero when the lock is already held.  */
      if (ret)
	ret = EBUSY;
      break;

    case PT_MTX_RECURSIVE:
      self = _pthread_self ();
      if (mtx_owned_p (mtxp, self, mtxp->__flags))
	{
	  /* The recursion counter would overflow.  */
	  if (__glibc_unlikely (mtxp->__cnt + 1 == 0))
	    return EAGAIN;

	  ++mtxp->__cnt;
	  ret = 0;
	}
      else if ((ret = lll_trylock (&mtxp->__lock)) == 0)
	{
	  mtx_set_owner (mtxp, self, mtxp->__flags);
	  mtxp->__cnt = 1;
	}
      else
	ret = EBUSY;

      break;

    case PT_MTX_ERRORCHECK:
      self = _pthread_self ();
      if ((ret = lll_trylock (&mtxp->__lock)) == 0)
	mtx_set_owner (mtxp, self, mtxp->__flags);
      else
	ret = EBUSY;
      break;

    case PT_MTX_NORMAL | PTHREAD_MUTEX_ROBUST:
    case PT_MTX_RECURSIVE | PTHREAD_MUTEX_ROBUST:
    case PT_MTX_ERRORCHECK | PTHREAD_MUTEX_ROBUST:
      self = _pthread_self ();
      ROBUST_LOCK (self, mtxp, __lll_robust_trylock);
      break;

    default:
      ret = EINVAL;
      break;
    }

  return ret;
}
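
A minimal caller sketch (not part of the source above; the function name is illustrative): pthread_mutex_trylock returns 0 on success and EBUSY when the lock is already held, so a caller branches on the return value instead of blocking.

#include <pthread.h>
#include <errno.h>
#include <stdio.h>

static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;

void
try_critical_section (void)
{
  int err = pthread_mutex_trylock (&lock);
  if (err == 0)
    {
      /* ... critical section ... */
      pthread_mutex_unlock (&lock);
    }
  else if (err == EBUSY)
    puts ("mutex busy; skipping the critical section");
}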
Example #2
int pthread_mutex_timedlock (pthread_mutex_t *mtxp,
  const struct timespec *tsp)
{
  struct pthread *self = PTHREAD_SELF;
  int ret, flags = mtxp->__flags & GSYNC_SHARED;

  switch (MTX_TYPE (mtxp))
    {
      case PTHREAD_MUTEX_NORMAL:
        ret = lll_abstimed_lock (&mtxp->__lock, tsp, flags);
        break;

      case PTHREAD_MUTEX_RECURSIVE:
        if (mtx_owned_p (mtxp, self, flags))
          {
            if (__glibc_unlikely (mtxp->__cnt + 1 == 0))
              return (EAGAIN);

            ++mtxp->__cnt;
            ret = 0;
          }
        else if ((ret = lll_abstimed_lock (&mtxp->__lock,
            tsp, flags)) == 0)
          {
            mtx_set_owner (mtxp, self, flags);
            mtxp->__cnt = 1;
          }

        break;

      case PTHREAD_MUTEX_ERRORCHECK:
        /* The calling thread already owns the mutex: report deadlock.  */
        if (mtxp->__owner_id == self->id)
          return (EDEADLK);
        else if ((ret = lll_abstimed_lock (&mtxp->__lock,
            tsp, flags)) == 0)
          mtx_set_owner (mtxp, self, flags);

        break;

      case PTHREAD_MUTEX_NORMAL     | PTHREAD_MUTEX_ROBUST:
      case PTHREAD_MUTEX_RECURSIVE  | PTHREAD_MUTEX_ROBUST:
      case PTHREAD_MUTEX_ERRORCHECK | PTHREAD_MUTEX_ROBUST:
        ROBUST_LOCK (self, mtxp, lll_robust_abstimed_lock, tsp, flags);
        break;

      default:
        ret = EINVAL;
        break;
    }

  return (ret);
}
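
A hedged usage sketch (caller name is made up): the tsp argument is an absolute CLOCK_REALTIME deadline, so a caller typically builds it from clock_gettime and checks for ETIMEDOUT.

#include <pthread.h>
#include <time.h>
#include <errno.h>
#include <stdio.h>

static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;

int
lock_with_timeout (void)
{
  struct timespec deadline;

  /* The deadline is absolute, not relative: start from the current
     CLOCK_REALTIME value and add the wait budget.  */
  clock_gettime (CLOCK_REALTIME, &deadline);
  deadline.tv_sec += 2;

  int err = pthread_mutex_timedlock (&lock, &deadline);
  if (err == ETIMEDOUT)
    {
      puts ("could not acquire the mutex within 2 seconds");
      return err;
    }

  /* ... critical section ... */
  pthread_mutex_unlock (&lock);
  return 0;
}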
Example #3
int pthread_mutex_trylock (pthread_mutex_t *mtxp)
{
  struct pthread *self = PTHREAD_SELF;
  int ret;

  switch (MTX_TYPE (mtxp))
    {
      case PTHREAD_MUTEX_NORMAL:
        ret = lll_trylock (&mtxp->__lock);
        break;

      case PTHREAD_MUTEX_RECURSIVE:
        if (mtx_owned_p (mtxp, self, mtxp->__flags))
          {
            if (__glibc_unlikely (mtxp->__cnt + 1 == 0))
              return (EAGAIN);

            ++mtxp->__cnt;
            ret = 0;
          }
        else if ((ret = lll_trylock (&mtxp->__lock)) == 0)
          {
            mtx_set_owner (mtxp, self, mtxp->__flags);
            mtxp->__cnt = 1;
          }

        break;

      case PTHREAD_MUTEX_ERRORCHECK:
        if (mtx_owned_p (mtxp, self, mtxp->__flags))
          ret = EDEADLK;
        else if ((ret = lll_trylock (&mtxp->__lock)) == 0)
          mtx_set_owner (mtxp, self, mtxp->__flags);

        break;

      case PTHREAD_MUTEX_NORMAL     | PTHREAD_MUTEX_ROBUST:
      case PTHREAD_MUTEX_RECURSIVE  | PTHREAD_MUTEX_ROBUST:
      case PTHREAD_MUTEX_ERRORCHECK | PTHREAD_MUTEX_ROBUST:
        ROBUST_LOCK (self, mtxp, lll_robust_trylock);
        break;

      default:
        ret = EINVAL;
        break;
    }

  return (ret);
}
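
For the PTHREAD_MUTEX_RECURSIVE case above, a small illustrative sketch (standard pthread mutex attribute calls only; the function name is made up): the owning thread may call trylock again, which only bumps the recursion count, so every successful lock needs a matching unlock.

#include <pthread.h>
#include <assert.h>

int
recursive_demo (void)
{
  pthread_mutexattr_t attr;
  pthread_mutex_t mtx;

  pthread_mutexattr_init (&attr);
  pthread_mutexattr_settype (&attr, PTHREAD_MUTEX_RECURSIVE);
  pthread_mutex_init (&mtx, &attr);
  pthread_mutexattr_destroy (&attr);

  pthread_mutex_lock (&mtx);
  /* Re-entry by the owner succeeds and increments the recursion count.  */
  assert (pthread_mutex_trylock (&mtx) == 0);

  pthread_mutex_unlock (&mtx);   /* undo the trylock */
  pthread_mutex_unlock (&mtx);   /* undo the initial lock */
  pthread_mutex_destroy (&mtx);
  return 0;
}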
Example #4
int pthread_mutex_lock (pthread_mutex_t *mtxp)
{
  struct pthread *self = PTHREAD_SELF;
  int flags = mtxp->__flags & GSYNC_SHARED;
  int ret = 0;

  switch (MTX_TYPE (mtxp))
    {
      case PTHREAD_MUTEX_NORMAL:
        lll_lock (&mtxp->__lock, flags);
        break;

      case PTHREAD_MUTEX_RECURSIVE:
        if (mtx_owned_p (mtxp, self, flags))
          {
            if (__glibc_unlikely (mtxp->__cnt + 1 == 0))
              return (EAGAIN);

            /* Already owned by this thread: bump the recursion count and
               return without touching the low-level lock.  */
            ++mtxp->__cnt;
            return (ret);
          }

        lll_lock (&mtxp->__lock, flags);
        mtx_set_owner (mtxp, self, flags);
        mtxp->__cnt = 1;
        break;

      case PTHREAD_MUTEX_ERRORCHECK:
        if (mtx_owned_p (mtxp, self, flags))
          return (EDEADLK);

        lll_lock (&mtxp->__lock, flags);
        mtx_set_owner (mtxp, self, flags);
        break;

      case PTHREAD_MUTEX_NORMAL     | PTHREAD_MUTEX_ROBUST:
      case PTHREAD_MUTEX_RECURSIVE  | PTHREAD_MUTEX_ROBUST:
      case PTHREAD_MUTEX_ERRORCHECK | PTHREAD_MUTEX_ROBUST:
        ROBUST_LOCK (self, mtxp, lll_robust_lock, flags);
        break;

      default:
        ret = EINVAL;
        break;
    }

  return (ret);
}
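
And a sketch of the PTHREAD_MUTEX_ERRORCHECK path (illustrative name, standard pthread calls only): a second lock from the owning thread is reported as EDEADLK instead of deadlocking.

#include <pthread.h>
#include <errno.h>
#include <assert.h>

int
errorcheck_demo (void)
{
  pthread_mutexattr_t attr;
  pthread_mutex_t mtx;

  pthread_mutexattr_init (&attr);
  pthread_mutexattr_settype (&attr, PTHREAD_MUTEX_ERRORCHECK);
  pthread_mutex_init (&mtx, &attr);
  pthread_mutexattr_destroy (&attr);

  pthread_mutex_lock (&mtx);
  /* A second lock by the same thread fails instead of hanging.  */
  int err = pthread_mutex_lock (&mtx);
  assert (err == EDEADLK);

  pthread_mutex_unlock (&mtx);
  pthread_mutex_destroy (&mtx);
  return err;
}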