Example no. 1
0
static void
validate_deref_chain(nir_deref *deref, validate_state *state)
{
   assert(deref->child == NULL || ralloc_parent(deref->child) == deref);

   nir_deref *parent = NULL;
   while (deref != NULL) {
      switch (deref->deref_type) {
      case nir_deref_type_array:
         assert(deref->type == glsl_get_array_element(parent->type));
         if (nir_deref_as_array(deref)->deref_array_type ==
             nir_deref_array_type_indirect)
            validate_src(&nir_deref_as_array(deref)->indirect, state);
         break;

      case nir_deref_type_struct:
         assert(deref->type ==
                glsl_get_struct_field(parent->type,
                                      nir_deref_as_struct(deref)->index));
         break;

      case nir_deref_type_var:
         break;

      default:
         assert(!"Invalid deref type");
         break;
      }

      parent = deref;
      deref = deref->child;
   }
}
Example no. 2
0
static void
validate_deref_chain(nir_deref *deref, nir_variable_mode mode,
                     validate_state *state)
{
   validate_assert(state, deref->child == NULL || ralloc_parent(deref->child) == deref);

   nir_deref *parent = NULL;
   while (deref != NULL) {
      switch (deref->deref_type) {
      case nir_deref_type_array:
         if (mode == nir_var_shared) {
            /* Shared variables have a bit more relaxed rules because we need
             * to be able to handle array derefs on vectors.  Fortunately,
             * nir_lower_io handles these just fine.
             */
            validate_assert(state, glsl_type_is_array(parent->type) ||
                                   glsl_type_is_matrix(parent->type) ||
                                   glsl_type_is_vector(parent->type));
         } else {
            /* Most of NIR cannot handle array derefs on vectors */
            validate_assert(state, glsl_type_is_array(parent->type) ||
                                   glsl_type_is_matrix(parent->type));
         }
         validate_assert(state, deref->type == glsl_get_array_element(parent->type));
         if (nir_deref_as_array(deref)->deref_array_type ==
             nir_deref_array_type_indirect)
            validate_src(&nir_deref_as_array(deref)->indirect, state, 32, 1);
         break;

      case nir_deref_type_struct:
      assume(parent); /* cannot happen: deref chain starts w/ nir_deref_var */
         validate_assert(state, deref->type ==
                glsl_get_struct_field(parent->type,
                                      nir_deref_as_struct(deref)->index));
         break;

      case nir_deref_type_var:
         break;

      default:
         validate_assert(state, !"Invalid deref type");
         break;
      }

      parent = deref;
      deref = deref->child;
   }
}
Example no. 3
0
static bool
constant_fold_deref(nir_instr *instr, nir_deref_var *deref)
{
   bool progress = false;

   for (nir_deref *tail = deref->deref.child; tail; tail = tail->child) {
      if (tail->deref_type != nir_deref_type_array)
         continue;

      nir_deref_array *arr = nir_deref_as_array(tail);

      if (arr->deref_array_type == nir_deref_array_type_indirect &&
          arr->indirect.is_ssa &&
          arr->indirect.ssa->parent_instr->type == nir_instr_type_load_const) {
         nir_load_const_instr *indirect =
            nir_instr_as_load_const(arr->indirect.ssa->parent_instr);

         arr->base_offset += indirect->value.u[0];

         /* Clear out the source */
         nir_instr_rewrite_src(instr, &arr->indirect, nir_src_for_ssa(NULL));

         arr->deref_array_type = nir_deref_array_type_direct;

         progress = true;
      }
   }

   return progress;
}
Example no. 4
0
static bool
deref_has_indirect(nir_deref_var *deref)
{
   for (nir_deref *tail = deref->deref.child; tail; tail = tail->child) {
      if (tail->deref_type == nir_deref_type_array) {
         nir_deref_array *arr = nir_deref_as_array(tail);
         if (arr->deref_array_type == nir_deref_array_type_indirect)
            return true;
      }
   }

   return false;
}
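deref_has_indirect above is the smallest instance of the chain-walking idiom shared by all of these examples. As a hedged illustration only, a lowering pass might use it to filter the instructions that actually need work; the helper name below is hypothetical:

static bool
needs_indirect_lowering(nir_intrinsic_instr *intrin)
{
   /* Only load_var instructions whose variable deref contains an indirect
    * array index need rewriting; everything else can be left alone. */
   return intrin->intrinsic == nir_intrinsic_load_var &&
          deref_has_indirect(intrin->variables[0]);
}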
Example no. 5
0
/* Calculate the sampler index based on array indices and also
 * calculate the base uniform location for struct members.
 */
static void
calc_sampler_offsets(nir_deref *tail, nir_tex_instr *instr,
                     unsigned *array_elements, nir_ssa_def **indirect,
                     nir_builder *b, unsigned *location)
{
   if (tail->child == NULL)
      return;

   switch (tail->child->deref_type) {
   case nir_deref_type_array: {
      nir_deref_array *deref_array = nir_deref_as_array(tail->child);

      assert(deref_array->deref_array_type != nir_deref_array_type_wildcard);

      calc_sampler_offsets(tail->child, instr, array_elements,
                           indirect, b, location);
      instr->sampler_index += deref_array->base_offset * *array_elements;

      if (deref_array->deref_array_type == nir_deref_array_type_indirect) {
         nir_ssa_def *mul =
            nir_imul(b, nir_imm_int(b, *array_elements),
                     nir_ssa_for_src(b, deref_array->indirect, 1));

         nir_instr_rewrite_src(&instr->instr, &deref_array->indirect,
                               NIR_SRC_INIT);

         if (*indirect) {
            *indirect = nir_iadd(b, *indirect, mul);
         } else {
            *indirect = mul;
         }
      }

      *array_elements *= glsl_get_length(tail->type);
      break;
   }

   case nir_deref_type_struct: {
      nir_deref_struct *deref_struct = nir_deref_as_struct(tail->child);
      *location += glsl_get_record_location_offset(tail->type, deref_struct->index);
      calc_sampler_offsets(tail->child, instr, array_elements,
                           indirect, b, location);
      break;
   }

   default:
      unreachable("Invalid deref type");
      break;
   }
}
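To make the accumulation concrete: for a hypothetical sampler2D s[4][3] and the deref s[i][2], the innermost level adds 2 * 1 to sampler_index and grows *array_elements to 3, and the outer level then feeds i with stride 3 into *indirect, so the final index is the sampler's base index plus 2 + 3*i. A hedged call-site sketch, assuming b is a nir_builder positioned at the texture instruction and the surrounding names are assumptions:

/* Hypothetical call site; the variable names are assumptions. */
unsigned location = instr->sampler->var->data.location;
unsigned array_elements = 1;
nir_ssa_def *indirect = NULL;
calc_sampler_offsets(&instr->sampler->deref, instr,
                     &array_elements, &indirect, b, &location);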
Example no. 6
0
/* Walks down the deref chain and returns the next deref in the chain whose
 * child is a wildcard.  In other words, given the chain a[1].foo[*].bar,
 * this function will return the deref to foo.  Calling it a second time
 * with [*].bar will return NULL.
 */
static nir_deref *
deref_next_wildcard_parent(nir_deref *deref)
{
    for (nir_deref *tail = deref; tail->child; tail = tail->child) {
        if (tail->child->deref_type != nir_deref_type_array)
            continue;

        nir_deref_array *arr = nir_deref_as_array(tail->child);

        if (arr->deref_array_type == nir_deref_array_type_wildcard)
            return tail;
    }

    return NULL;
}
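As a hedged sketch of how a caller could visit every wildcard level in turn (assuming head points at the start of a chain such as a[*].foo[*]):

for (nir_deref *parent = deref_next_wildcard_parent(head);
     parent != NULL;
     parent = deref_next_wildcard_parent(parent->child)) {
   /* parent->child is the nir_deref_array holding the wildcard. */
}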
Example no. 7
0
static void
emit_load_store(nir_builder *b, nir_intrinsic_instr *orig_instr,
                nir_deref_var *deref, nir_deref *tail,
                nir_ssa_def **dest, nir_ssa_def *src)
{
   for (; tail->child; tail = tail->child) {
      if (tail->child->deref_type != nir_deref_type_array)
         continue;

      nir_deref_array *arr = nir_deref_as_array(tail->child);
      if (arr->deref_array_type != nir_deref_array_type_indirect)
         continue;

      int length = glsl_get_length(tail->type);

      emit_indirect_load_store(b, orig_instr, deref, tail, -arr->base_offset,
                               length - arr->base_offset, dest, src);
      return;
   }

   assert(tail && tail->child == NULL);

   /* We reached the end of the deref chain.  Emit the instruction */

   if (src == NULL) {
      /* This is a load instruction */
      nir_intrinsic_instr *load =
         nir_intrinsic_instr_create(b->shader, nir_intrinsic_load_var);
      load->num_components = orig_instr->num_components;
      load->variables[0] =
         nir_deref_as_var(nir_copy_deref(load, &deref->deref));
      unsigned bit_size = orig_instr->dest.ssa.bit_size;
      nir_ssa_dest_init(&load->instr, &load->dest,
                        load->num_components, bit_size, NULL);
      nir_builder_instr_insert(b, &load->instr);
      *dest = &load->dest.ssa;
   } else {
      /* This is a store instruction */
      nir_intrinsic_instr *store =
         nir_intrinsic_instr_create(b->shader, nir_intrinsic_store_var);
      store->num_components = orig_instr->num_components;
      nir_intrinsic_set_write_mask(store, nir_intrinsic_write_mask(orig_instr));
      store->variables[0] =
         nir_deref_as_var(nir_copy_deref(store, &deref->deref));
      store->src[0] = nir_src_for_ssa(src);
      nir_builder_instr_insert(b, &store->instr);
   }
}
Example no. 8
0
/* \sa deref_may_be_aliased */
static bool
deref_may_be_aliased_node(struct deref_node *node, nir_deref *deref,
                          struct lower_variables_state *state)
{
   if (deref->child == NULL) {
      return false;
   } else {
      switch (deref->child->deref_type) {
      case nir_deref_type_array: {
         nir_deref_array *arr = nir_deref_as_array(deref->child);
         if (arr->deref_array_type == nir_deref_array_type_indirect)
            return true;

         /* If there is an indirect at this level, we're aliased. */
         if (node->indirect)
            return true;

         assert(arr->deref_array_type == nir_deref_array_type_direct);

         if (node->children[arr->base_offset] &&
             deref_may_be_aliased_node(node->children[arr->base_offset],
                                       deref->child, state))
            return true;

         if (node->wildcard &&
             deref_may_be_aliased_node(node->wildcard, deref->child, state))
            return true;

         return false;
      }

      case nir_deref_type_struct: {
         nir_deref_struct *str = nir_deref_as_struct(deref->child);
         if (node->children[str->index]) {
             return deref_may_be_aliased_node(node->children[str->index],
                                              deref->child, state);
         } else {
            return false;
         }
      }

      default:
         unreachable("Invalid nir_deref child type");
      }
   }
}
Example no. 9
0
struct vtn_ssa_value *
vtn_local_load(struct vtn_builder *b, nir_deref_var *src)
{
   nir_deref *src_tail = get_deref_tail(src);
   struct vtn_ssa_value *val = vtn_create_ssa_value(b, src_tail->type);
   _vtn_local_load_store(b, true, src, src_tail, val);

   if (src_tail->child) {
      nir_deref_array *vec_deref = nir_deref_as_array(src_tail->child);
      assert(vec_deref->deref.child == NULL);
      val->type = vec_deref->deref.type;
      if (vec_deref->deref_array_type == nir_deref_array_type_direct)
         val->def = vtn_vector_extract(b, val->def, vec_deref->base_offset);
      else
         val->def = vtn_vector_extract_dynamic(b, val->def,
                                               vec_deref->indirect.ssa);
   }

   return val;
}
Example no. 10
0
void
vtn_local_store(struct vtn_builder *b, struct vtn_ssa_value *src,
                nir_deref_var *dest)
{
   nir_deref *dest_tail = get_deref_tail(dest);

   if (dest_tail->child) {
      struct vtn_ssa_value *val = vtn_create_ssa_value(b, dest_tail->type);
      _vtn_local_load_store(b, true, dest, dest_tail, val);
      nir_deref_array *deref = nir_deref_as_array(dest_tail->child);
      assert(deref->deref.child == NULL);
      if (deref->deref_array_type == nir_deref_array_type_direct)
         val->def = vtn_vector_insert(b, val->def, src->def,
                                      deref->base_offset);
      else
         val->def = vtn_vector_insert_dynamic(b, val->def, src->def,
                                              deref->indirect.ssa);
      _vtn_local_load_store(b, false, dest, dest_tail, val);
   } else {
      _vtn_local_load_store(b, false, dest, dest_tail, src);
   }
}
Example no. 11
0
/* \sa foreach_deref_node_match */
static bool
foreach_deref_node_worker(struct deref_node *node, nir_deref *deref,
                          bool (* cb)(struct deref_node *node,
                                      struct lower_variables_state *state),
                          struct lower_variables_state *state)
{
   if (deref->child == NULL) {
      return cb(node, state);
   } else {
      switch (deref->child->deref_type) {
      case nir_deref_type_array: {
         nir_deref_array *arr = nir_deref_as_array(deref->child);
         assert(arr->deref_array_type == nir_deref_array_type_direct);
         if (node->children[arr->base_offset] &&
             !foreach_deref_node_worker(node->children[arr->base_offset],
                                        deref->child, cb, state))
            return false;

         if (node->wildcard &&
             !foreach_deref_node_worker(node->wildcard,
                                        deref->child, cb, state))
            return false;

         return true;
      }

      case nir_deref_type_struct: {
         nir_deref_struct *str = nir_deref_as_struct(deref->child);
         return foreach_deref_node_worker(node->children[str->index],
                                          deref->child, cb, state);
      }

      default:
         unreachable("Invalid deref child type");
      }
   }
}
Example no. 12
0
/* This function recursively walks the given deref chain and replaces the
 * given copy instruction with an equivalent sequence of load/store
 * operations.
 *
 * @copy_instr    The copy instruction to replace; new instructions will be
 *                inserted before this one
 *
 * @dest_head     The head of the destination variable deref chain
 *
 * @src_head      The head of the source variable deref chain
 *
 * @dest_tail     The current tail of the destination variable deref chain;
 *                this is used for recursion and external callers of this
 *                function should call it with tail == head
 *
 * @src_tail      The current tail of the source variable deref chain;
 *                this is used for recursion and external callers of this
 *                function should call it with tail == head
 *
 * @state         The current variable lowering state
 */
static void
emit_copy_load_store(nir_intrinsic_instr *copy_instr,
                     nir_deref_var *dest_head, nir_deref_var *src_head,
                     nir_deref *dest_tail, nir_deref *src_tail, void *mem_ctx)
{
    /* Find the next pair of wildcards */
    nir_deref *src_arr_parent = deref_next_wildcard_parent(src_tail);
    nir_deref *dest_arr_parent = deref_next_wildcard_parent(dest_tail);

    if (src_arr_parent || dest_arr_parent) {
        /* Wildcards had better come in matched pairs */
        assert(src_arr_parent && dest_arr_parent);

        nir_deref_array *src_arr = nir_deref_as_array(src_arr_parent->child);
        nir_deref_array *dest_arr = nir_deref_as_array(dest_arr_parent->child);

        unsigned length = glsl_get_length(src_arr_parent->type);
        /* The wildcards should represent the same number of elements */
        assert(length == glsl_get_length(dest_arr_parent->type));
        assert(length > 0);

        /* Walk over all of the elements that this wildcard refers to and
         * call emit_copy_load_store on each one of them */
        src_arr->deref_array_type = nir_deref_array_type_direct;
        dest_arr->deref_array_type = nir_deref_array_type_direct;
        for (unsigned i = 0; i < length; i++) {
            src_arr->base_offset = i;
            dest_arr->base_offset = i;
            emit_copy_load_store(copy_instr, dest_head, src_head,
                                 &dest_arr->deref, &src_arr->deref, mem_ctx);
        }
        src_arr->deref_array_type = nir_deref_array_type_wildcard;
        dest_arr->deref_array_type = nir_deref_array_type_wildcard;
    } else {
        /* In this case, we have no wildcards anymore, so all we have to do
         * is just emit the load and store operations. */
        src_tail = nir_deref_tail(src_tail);
        dest_tail = nir_deref_tail(dest_tail);

        assert(src_tail->type == dest_tail->type);

        unsigned num_components = glsl_get_vector_elements(src_tail->type);

        nir_intrinsic_instr *load =
            nir_intrinsic_instr_create(mem_ctx, nir_intrinsic_load_var);
        load->num_components = num_components;
        load->variables[0] = nir_deref_as_var(nir_copy_deref(load, &src_head->deref));
        nir_ssa_dest_init(&load->instr, &load->dest, num_components, NULL);

        nir_instr_insert_before(&copy_instr->instr, &load->instr);

        nir_intrinsic_instr *store =
            nir_intrinsic_instr_create(mem_ctx, nir_intrinsic_store_var);
        store->num_components = num_components;
        store->const_index[0] = (1 << num_components) - 1;
        store->variables[0] = nir_deref_as_var(nir_copy_deref(store, &dest_head->deref));

        store->src[0].is_ssa = true;
        store->src[0].ssa = &load->dest.ssa;

        nir_instr_insert_before(&copy_instr->instr, &store->instr);
    }
}
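A hedged call-site sketch, matching the comment above that external callers pass the deref heads as both head and tail; it assumes copy is a nir_intrinsic_copy_var whose variables[0] is the destination and variables[1] the source, and that mem_ctx comes from the surrounding pass:

nir_deref_var *dest_head = copy->variables[0];
nir_deref_var *src_head = copy->variables[1];
emit_copy_load_store(copy, dest_head, src_head,
                     &dest_head->deref, &src_head->deref, mem_ctx);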
Example no. 13
0
static void
lower_instr(nir_intrinsic_instr *instr,
            lower_atomic_state *state)
{
   nir_intrinsic_op op;
   switch (instr->intrinsic) {
   case nir_intrinsic_atomic_counter_read_var:
      op = nir_intrinsic_atomic_counter_read;
      break;

   case nir_intrinsic_atomic_counter_inc_var:
      op = nir_intrinsic_atomic_counter_inc;
      break;

   case nir_intrinsic_atomic_counter_dec_var:
      op = nir_intrinsic_atomic_counter_dec;
      break;

   default:
      return;
   }

   if (instr->variables[0]->var->data.mode != nir_var_uniform &&
       instr->variables[0]->var->data.mode != nir_var_shader_storage)
      return; /* atomics passed as function arguments can't be lowered */

   void *mem_ctx = ralloc_parent(instr);
   unsigned uniform_loc = instr->variables[0]->var->data.location;

   nir_intrinsic_instr *new_instr = nir_intrinsic_instr_create(mem_ctx, op);
   new_instr->const_index[0] =
      state->shader_program->UniformStorage[uniform_loc].opaque[state->shader->stage].index;

   nir_load_const_instr *offset_const = nir_load_const_instr_create(mem_ctx, 1);
   offset_const->value.u[0] = instr->variables[0]->var->data.atomic.offset;

   nir_instr_insert_before(&instr->instr, &offset_const->instr);

   nir_ssa_def *offset_def = &offset_const->def;

   nir_deref *tail = &instr->variables[0]->deref;
   while (tail->child != NULL) {
      assert(tail->child->deref_type == nir_deref_type_array);
      nir_deref_array *deref_array = nir_deref_as_array(tail->child);
      tail = tail->child;

      unsigned child_array_elements = tail->child != NULL ?
         glsl_get_aoa_size(tail->type) : 1;

      offset_const->value.u[0] += deref_array->base_offset *
         child_array_elements * ATOMIC_COUNTER_SIZE;

      if (deref_array->deref_array_type == nir_deref_array_type_indirect) {
         nir_load_const_instr *atomic_counter_size =
               nir_load_const_instr_create(mem_ctx, 1);
         atomic_counter_size->value.u[0] = child_array_elements * ATOMIC_COUNTER_SIZE;
         nir_instr_insert_before(&instr->instr, &atomic_counter_size->instr);

         nir_alu_instr *mul = nir_alu_instr_create(mem_ctx, nir_op_imul);
         nir_ssa_dest_init(&mul->instr, &mul->dest.dest, 1, NULL);
         mul->dest.write_mask = 0x1;
         nir_src_copy(&mul->src[0].src, &deref_array->indirect, mul);
         mul->src[1].src.is_ssa = true;
         mul->src[1].src.ssa = &atomic_counter_size->def;
         nir_instr_insert_before(&instr->instr, &mul->instr);

         nir_alu_instr *add = nir_alu_instr_create(mem_ctx, nir_op_iadd);
         nir_ssa_dest_init(&add->instr, &add->dest.dest, 1, NULL);
         add->dest.write_mask = 0x1;
         add->src[0].src.is_ssa = true;
         add->src[0].src.ssa = &mul->dest.dest.ssa;
         add->src[1].src.is_ssa = true;
         add->src[1].src.ssa = offset_def;
         nir_instr_insert_before(&instr->instr, &add->instr);

         offset_def = &add->dest.dest.ssa;
      }
   }

   new_instr->src[0].is_ssa = true;
   new_instr->src[0].ssa = offset_def;

   if (instr->dest.is_ssa) {
      nir_ssa_dest_init(&new_instr->instr, &new_instr->dest,
                        instr->dest.ssa.num_components, NULL);
      nir_ssa_def_rewrite_uses(&instr->dest.ssa,
                               nir_src_for_ssa(&new_instr->dest.ssa));
   } else {
      nir_dest_copy(&new_instr->dest, &instr->dest, mem_ctx);
   }

   nir_instr_insert_before(&instr->instr, &new_instr->instr);
   nir_instr_remove(&instr->instr);
}
Example no. 14
0
/* Gets the deref_node for the given deref chain and creates it if it
 * doesn't yet exist.  If the deref is fully-qualified and direct and
 * state->add_to_direct_deref_nodes is true, it will be added to the hash
 * table of fully-qualified direct derefs.
 */
static struct deref_node *
get_deref_node(nir_deref_var *deref, struct lower_variables_state *state)
{
   bool is_direct = true;

   /* Start at the base of the chain. */
   struct deref_node *node = get_deref_node_for_var(deref->var, state);
   assert(deref->deref.type == node->type);

   for (nir_deref *tail = deref->deref.child; tail; tail = tail->child) {
      switch (tail->deref_type) {
      case nir_deref_type_struct: {
         nir_deref_struct *deref_struct = nir_deref_as_struct(tail);

         assert(deref_struct->index < glsl_get_length(node->type));

         if (node->children[deref_struct->index] == NULL)
            node->children[deref_struct->index] =
               deref_node_create(node, tail->type, state->dead_ctx);

         node = node->children[deref_struct->index];
         break;
      }

      case nir_deref_type_array: {
         nir_deref_array *arr = nir_deref_as_array(tail);

         switch (arr->deref_array_type) {
         case nir_deref_array_type_direct:
            /* This is possible if a loop unrolls and generates an
             * out-of-bounds offset.  We need to handle this at least
             * somewhat gracefully.
             */
            if (arr->base_offset >= glsl_get_length(node->type))
               return NULL;

            if (node->children[arr->base_offset] == NULL)
               node->children[arr->base_offset] =
                  deref_node_create(node, tail->type, state->dead_ctx);

            node = node->children[arr->base_offset];
            break;

         case nir_deref_array_type_indirect:
            if (node->indirect == NULL)
               node->indirect = deref_node_create(node, tail->type,
                                                  state->dead_ctx);

            node = node->indirect;
            is_direct = false;
            break;

         case nir_deref_array_type_wildcard:
            if (node->wildcard == NULL)
               node->wildcard = deref_node_create(node, tail->type,
                                                  state->dead_ctx);

            node = node->wildcard;
            is_direct = false;
            break;

         default:
            unreachable("Invalid array deref type");
         }
         break;
      }
      default:
         unreachable("Invalid deref type");
      }
   }

   assert(node);

   /* Only insert if it isn't already in the list. */
   if (is_direct && state->add_to_direct_deref_nodes &&
       node->direct_derefs_link.next == NULL) {
      node->deref = deref;
      assert(deref->var != NULL);
      exec_list_push_tail(&state->direct_deref_nodes,
                          &node->direct_derefs_link);
   }

   return node;
}
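A hedged usage sketch; the NULL return matters because, as the array case above notes, loop unrolling can produce out-of-bounds direct offsets that simply have no node:

/* Hypothetical caller: skip instructions whose deref turned out to be
 * out of bounds. */
struct deref_node *node = get_deref_node(intrin->variables[0], state);
if (node == NULL)
   return;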
Example no. 15
0
static nir_src
get_deref_reg_src(nir_deref_var *deref, nir_instr *instr,
                  struct locals_to_regs_state *state)
{
   nir_src src;

   src.is_ssa = false;
   src.reg.reg = get_reg_for_deref(deref, state);
   src.reg.base_offset = 0;
   src.reg.indirect = NULL;

   /* It is possible for a user to create a shader that has an array with a
    * single element and then proceed to access it indirectly.  Indirectly
    * accessing a non-array register is not allowed in NIR.  In order to
    * handle this case we just convert it to a direct reference.
    */
   if (src.reg.reg->num_array_elems == 0)
      return src;

   nir_deref *tail = &deref->deref;
   while (tail->child != NULL) {
      const struct glsl_type *parent_type = tail->type;
      tail = tail->child;

      if (tail->deref_type != nir_deref_type_array)
         continue;

      nir_deref_array *deref_array = nir_deref_as_array(tail);

      src.reg.base_offset *= glsl_get_length(parent_type);
      src.reg.base_offset += deref_array->base_offset;

      if (src.reg.indirect) {
         nir_load_const_instr *load_const =
            nir_load_const_instr_create(state->shader, 1, 32);
         load_const->value.u32[0] = glsl_get_length(parent_type);
         nir_instr_insert_before(instr, &load_const->instr);

         nir_alu_instr *mul = nir_alu_instr_create(state->shader, nir_op_imul);
         mul->src[0].src = *src.reg.indirect;
         mul->src[1].src.is_ssa = true;
         mul->src[1].src.ssa = &load_const->def;
         mul->dest.write_mask = 1;
         nir_ssa_dest_init(&mul->instr, &mul->dest.dest, 1, 32, NULL);
         nir_instr_insert_before(instr, &mul->instr);

         src.reg.indirect->is_ssa = true;
         src.reg.indirect->ssa = &mul->dest.dest.ssa;
      }

      if (deref_array->deref_array_type == nir_deref_array_type_indirect) {
         if (src.reg.indirect == NULL) {
            src.reg.indirect = ralloc(state->shader, nir_src);
            nir_src_copy(src.reg.indirect, &deref_array->indirect,
                         state->shader);
         } else {
            nir_alu_instr *add = nir_alu_instr_create(state->shader,
                                                      nir_op_iadd);
            add->src[0].src = *src.reg.indirect;
            nir_src_copy(&add->src[1].src, &deref_array->indirect, add);
            add->dest.write_mask = 1;
            nir_ssa_dest_init(&add->instr, &add->dest.dest, 1, 32, NULL);
            nir_instr_insert_before(instr, &add->instr);

            src.reg.indirect->is_ssa = true;
            src.reg.indirect->ssa = &add->dest.dest.ssa;
         }
      }
   }

   return src;
}
Example no. 16
0
static void
lower_instr(nir_intrinsic_instr *instr, nir_function_impl *impl)
{
   nir_intrinsic_op op;
   switch (instr->intrinsic) {
   case nir_intrinsic_atomic_counter_read_var:
      op = nir_intrinsic_atomic_counter_read;
      break;

   case nir_intrinsic_atomic_counter_inc_var:
      op = nir_intrinsic_atomic_counter_inc;
      break;

   case nir_intrinsic_atomic_counter_dec_var:
      op = nir_intrinsic_atomic_counter_dec;
      break;

   default:
      return;
   }

   if (instr->variables[0]->var->data.mode != nir_var_uniform)
      return; /* atomics passed as function arguments can't be lowered */

   void *mem_ctx = ralloc_parent(instr);

   nir_intrinsic_instr *new_instr = nir_intrinsic_instr_create(mem_ctx, op);
   new_instr->const_index[0] =
      (int) instr->variables[0]->var->data.atomic.buffer_index;

   nir_load_const_instr *offset_const = nir_load_const_instr_create(mem_ctx, 1);
   offset_const->value.u[0] = instr->variables[0]->var->data.atomic.offset;

   nir_instr_insert_before(&instr->instr, &offset_const->instr);

   nir_ssa_def *offset_def = &offset_const->def;

   if (instr->variables[0]->deref.child != NULL) {
      assert(instr->variables[0]->deref.child->deref_type ==
             nir_deref_type_array);
      nir_deref_array *deref_array =
         nir_deref_as_array(instr->variables[0]->deref.child);
      assert(deref_array->deref.child == NULL);

      offset_const->value.u[0] +=
         deref_array->base_offset * ATOMIC_COUNTER_SIZE;

      if (deref_array->deref_array_type == nir_deref_array_type_indirect) {
         nir_load_const_instr *atomic_counter_size =
               nir_load_const_instr_create(mem_ctx, 1);
         atomic_counter_size->value.u[0] = ATOMIC_COUNTER_SIZE;
         nir_instr_insert_before(&instr->instr, &atomic_counter_size->instr);

         nir_alu_instr *mul = nir_alu_instr_create(mem_ctx, nir_op_imul);
         nir_ssa_dest_init(&mul->instr, &mul->dest.dest, 1, NULL);
         mul->dest.write_mask = 0x1;
         nir_src_copy(&mul->src[0].src, &deref_array->indirect, mem_ctx);
         mul->src[1].src.is_ssa = true;
         mul->src[1].src.ssa = &atomic_counter_size->def;
         nir_instr_insert_before(&instr->instr, &mul->instr);

         nir_alu_instr *add = nir_alu_instr_create(mem_ctx, nir_op_iadd);
         nir_ssa_dest_init(&add->instr, &add->dest.dest, 1, NULL);
         add->dest.write_mask = 0x1;
         add->src[0].src.is_ssa = true;
         add->src[0].src.ssa = &mul->dest.dest.ssa;
         add->src[1].src.is_ssa = true;
         add->src[1].src.ssa = &offset_const->def;
         nir_instr_insert_before(&instr->instr, &add->instr);

         offset_def = &add->dest.dest.ssa;
      }
   }

   new_instr->src[0].is_ssa = true;
   new_instr->src[0].ssa = offset_def;

   if (instr->dest.is_ssa) {
      nir_ssa_dest_init(&new_instr->instr, &new_instr->dest,
                        instr->dest.ssa.num_components, NULL);
      nir_ssa_def_rewrite_uses(&instr->dest.ssa,
                               nir_src_for_ssa(&new_instr->dest.ssa),
                               mem_ctx);
   } else {
      nir_dest_copy(&new_instr->dest, &instr->dest, mem_ctx);
   }

   nir_instr_insert_before(&instr->instr, &new_instr->instr);
   nir_instr_remove(&instr->instr);
}
Example no. 17
0
static void
lower_sampler(nir_tex_instr *instr, struct gl_shader_program *shader_program,
              const struct gl_program *prog, void *mem_ctx)
{
   if (instr->sampler == NULL)
      return;

   /* Get the name and the offset */
   instr->sampler_index = 0;
   bool has_indirect = false;
   char *name = ralloc_strdup(mem_ctx, instr->sampler->var->name);

   for (nir_deref *deref = &instr->sampler->deref;
        deref->child; deref = deref->child) {
      switch (deref->child->deref_type) {
      case nir_deref_type_array: {
         nir_deref_array *deref_array = nir_deref_as_array(deref->child);

         /* XXX: We're assuming here that the indirect is the last array
          * thing we have.  This should be ok for now as we don't support
          * arrays_of_arrays yet.
          */
         assert(!has_indirect);

         instr->sampler_index *= glsl_get_length(deref->type);
         switch (deref_array->deref_array_type) {
         case nir_deref_array_type_direct:
            instr->sampler_index += deref_array->base_offset;
            if (deref_array->deref.child)
               ralloc_asprintf_append(&name, "[%u]", deref_array->base_offset);
            break;
         case nir_deref_array_type_indirect: {
            assert(!has_indirect);

            instr->src = reralloc(mem_ctx, instr->src, nir_tex_src,
                                  instr->num_srcs + 1);
            memset(&instr->src[instr->num_srcs], 0, sizeof *instr->src);
            instr->src[instr->num_srcs].src_type = nir_tex_src_sampler_offset;
            instr->num_srcs++;

            nir_instr_rewrite_src(&instr->instr,
                                  &instr->src[instr->num_srcs - 1].src,
                                  deref_array->indirect);

            instr->sampler_array_size = glsl_get_length(deref->type);

            nir_src empty;
            memset(&empty, 0, sizeof empty);
            nir_instr_rewrite_src(&instr->instr, &deref_array->indirect, empty);

            if (deref_array->deref.child)
               ralloc_strcat(&name, "[0]");
            break;
         }

         case nir_deref_array_type_wildcard:
            unreachable("Cannot copy samplers");
         default:
            unreachable("Invalid deref array type");
         }
         break;
      }

      case nir_deref_type_struct: {
         nir_deref_struct *deref_struct = nir_deref_as_struct(deref->child);
         const char *field = glsl_get_struct_elem_name(deref->type,
                                                       deref_struct->index);
         ralloc_asprintf_append(&name, ".%s", field);
         break;
      }

      default:
         unreachable("Invalid deref type");
         break;
      }
   }

   instr->sampler_index += get_sampler_index(shader_program, name, prog);

   instr->sampler = NULL;
}
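For instance (a hypothetical case, not taken from this file): given a uniform struct { sampler2D arr[8]; } s and the deref s.arr[3], the loop appends ".arr" for the struct member, folds the trailing array index into sampler_index instead of the name, and ends up adding 3 to whatever index get_sampler_index reports for the uniform "s.arr".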
Example no. 18
0
static void
emit_indirect_load_store(nir_builder *b, nir_intrinsic_instr *orig_instr,
                         nir_deref_var *deref, nir_deref *arr_parent,
                         int start, int end,
                         nir_ssa_def **dest, nir_ssa_def *src)
{
   assert(arr_parent->child &&
          arr_parent->child->deref_type == nir_deref_type_array);
   nir_deref_array *arr = nir_deref_as_array(arr_parent->child);
   assert(arr->deref_array_type == nir_deref_array_type_indirect);
   assert(arr->indirect.is_ssa);

   assert(start < end);
   if (start == end - 1) {
      /* Base case.  Just emit the load/store op */
      nir_deref_array direct = *arr;
      direct.deref_array_type = nir_deref_array_type_direct;
      direct.base_offset += start;
      direct.indirect = NIR_SRC_INIT;

      arr_parent->child = &direct.deref;
      emit_load_store(b, orig_instr, deref, &arr->deref, dest, src);
      arr_parent->child = &arr->deref;
   } else {
      int mid = start + (end - start) / 2;

      nir_ssa_def *then_dest, *else_dest;

      nir_if *if_stmt = nir_if_create(b->shader);
      if_stmt->condition = nir_src_for_ssa(nir_ilt(b, arr->indirect.ssa,
                                                      nir_imm_int(b, mid)));
      nir_cf_node_insert(b->cursor, &if_stmt->cf_node);

      b->cursor = nir_after_cf_list(&if_stmt->then_list);
      emit_indirect_load_store(b, orig_instr, deref, arr_parent,
                               start, mid, &then_dest, src);

      b->cursor = nir_after_cf_list(&if_stmt->else_list);
      emit_indirect_load_store(b, orig_instr, deref, arr_parent,
                               mid, end, &else_dest, src);

      b->cursor = nir_after_cf_node(&if_stmt->cf_node);

      if (src == NULL) {
         /* We're a load.  We need to insert a phi node */
         nir_phi_instr *phi = nir_phi_instr_create(b->shader);
         unsigned bit_size = then_dest->bit_size;
         nir_ssa_dest_init(&phi->instr, &phi->dest,
                           then_dest->num_components, bit_size, NULL);

         nir_phi_src *src0 = ralloc(phi, nir_phi_src);
         src0->pred = nir_cf_node_as_block(nir_if_last_then_node(if_stmt));
         src0->src = nir_src_for_ssa(then_dest);
         exec_list_push_tail(&phi->srcs, &src0->node);

         nir_phi_src *src1 = ralloc(phi, nir_phi_src);
         src1->pred = nir_cf_node_as_block(nir_if_last_else_node(if_stmt));
         src1->src = nir_src_for_ssa(else_dest);
         exec_list_push_tail(&phi->srcs, &src1->node);

         nir_builder_instr_insert(b, &phi->instr);
         *dest = &phi->dest.ssa;
      }
   }
}
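For example, with base_offset 0 and an array of length 8, the initial call covers the range [0, 8); the first comparison tests the indirect against 4, the next level against 2 or 6, and after three levels of nesting each of the eight leaves emits a direct access at a known offset, with phi nodes merging the loaded values back up the if-ladder.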
Example no. 19
0
static unsigned
get_io_offset(nir_deref_var *deref, nir_instr *instr, nir_src *indirect,
              struct lower_io_state *state)
{
   bool found_indirect = false;
   unsigned base_offset = 0;

   nir_deref *tail = &deref->deref;
   while (tail->child != NULL) {
      const struct glsl_type *parent_type = tail->type;
      tail = tail->child;

      if (tail->deref_type == nir_deref_type_array) {
         nir_deref_array *deref_array = nir_deref_as_array(tail);
         unsigned size = type_size(tail->type);

         base_offset += size * deref_array->base_offset;

         if (deref_array->deref_array_type == nir_deref_array_type_indirect) {
            nir_load_const_instr *load_const =
               nir_load_const_instr_create(state->mem_ctx, 1);
            load_const->value.u[0] = size;
            nir_instr_insert_before(instr, &load_const->instr);

            nir_alu_instr *mul = nir_alu_instr_create(state->mem_ctx,
                                                      nir_op_imul);
            mul->src[0].src.is_ssa = true;
            mul->src[0].src.ssa = &load_const->def;
            nir_src_copy(&mul->src[1].src, &deref_array->indirect,
                         state->mem_ctx);
            mul->dest.write_mask = 1;
            nir_ssa_dest_init(&mul->instr, &mul->dest.dest, 1, NULL);
            nir_instr_insert_before(instr, &mul->instr);

            if (found_indirect) {
               nir_alu_instr *add = nir_alu_instr_create(state->mem_ctx,
                                                         nir_op_iadd);
               add->src[0].src = *indirect;
               add->src[1].src.is_ssa = true;
               add->src[1].src.ssa = &mul->dest.dest.ssa;
               add->dest.write_mask = 1;
               nir_ssa_dest_init(&add->instr, &add->dest.dest, 1, NULL);
               nir_instr_insert_before(instr, &add->instr);

               indirect->is_ssa = true;
               indirect->ssa = &add->dest.dest.ssa;
            } else {
               indirect->is_ssa = true;
               indirect->ssa = &mul->dest.dest.ssa;
               found_indirect = true;
            }
         }
      } else if (tail->deref_type == nir_deref_type_struct) {
         nir_deref_struct *deref_struct = nir_deref_as_struct(tail);

         for (unsigned i = 0; i < deref_struct->index; i++)
            base_offset += type_size(glsl_get_struct_field(parent_type, i));
      }
   }

   return base_offset;
}
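As a hypothetical worked example: walking in[i].color, where in is an array of struct { vec4 pos; vec4 color; }, the array level adds type_size(struct) * i to *indirect (any direct part of that index goes into base_offset instead), and the struct level adds type_size(vec4) for the pos field that precedes color; the returned base_offset is the sum of those direct contributions.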