Esempio n. 1
0
/**
 * Recursively resolves an IDENTIFIER to a parameter into its actual value,
 * by looking it up in the global_param_table_sc
 * Also try and fold any BINARY_OPERATIONs now that an IDENTIFIER has been
 * resolved
 */
/**
 * Recursively resolves an IDENTIFIER node to its actual value by looking it
 * up in the module's local parameter table (found via global_param_table_sc),
 * and attempts to constant-fold any BINARY_OPERATION now that its children
 * may have been resolved.
 *
 * module_name: name of the module whose parameter table is consulted
 *              (must be non-NULL when an IDENTIFIER is encountered)
 * node:        AST node to resolve; may be NULL
 *
 * Returns the resolved node (the parameter's NUMBERS value node, or the
 * folded constant), or the original node when no resolution applies.
 * NULL in, NULL out.  The input tree is not modified; resolution happens
 * on a shallow working copy that is freed before returning.
 */
ast_node_t *resolve_node(char *module_name, ast_node_t *node)
{
    if (node)
    {
        long sc_spot;
        int i;
        info_ast_visit_t *node_details;
        STRING_CACHE *local_param_table_sc;

        /* Work on a shallow copy with its own children array so resolved
         * child pointers can be substituted without touching the original. */
        ast_node_t *node_copy = (ast_node_t *)malloc(sizeof(ast_node_t));
        memcpy(node_copy, node, sizeof(ast_node_t));
        node_copy->children = (ast_node_t **)malloc(sizeof(ast_node_t *) * node_copy->num_children);

        for (i = 0; i < node->num_children; i++)
        {
            node_copy->children[i] = resolve_node(module_name, node->children[i]);
        }

        switch (node->type)
        {
        case IDENTIFIERS:
            /* Find the module's local parameter table, then look the
             * identifier up in it; on a hit, return the parameter's value. */
            oassert(module_name);
            sc_spot = sc_lookup_string(global_param_table_sc, module_name);
            oassert(sc_spot != -1);
            local_param_table_sc = (STRING_CACHE *)global_param_table_sc->data[sc_spot];
            sc_spot = sc_lookup_string(local_param_table_sc, node->types.identifier);
            if (sc_spot != -1)
            {
                node = (ast_node_t *)local_param_table_sc->data[sc_spot];
                oassert(node->type == NUMBERS);
            }
            break;

        case BINARY_OPERATION:
            /* Mark the copy shared so constantFold() does not free nodes
             * that the original tree still references. */
            node_copy->shared_node = TRUE;
            node_details = constantFold(node_copy);
            node_copy->shared_node = FALSE;
            if (node_details)
            {
                if (node_details->is_constant_folded == TRUE)
                {
                    node = node_details->from;
                    oassert(node->type == NUMBERS);
                }
                /* BUGFIX: free node_details unconditionally; the previous
                 * code leaked it whenever folding did not succeed. */
                free(node_details);
            }
            break;

        default:
            break;
        }

        /* Only the working copy is owned here; the (resolved) children are
         * owned by the AST / parameter table and must not be freed. */
        free(node_copy->children);
        free(node_copy);
    }
    return node;
}
Esempio n. 2
0
  /**
   * Type-checks the unary operation and labels this node with the result
   * type (UTI).  Order of operations:
   *   1. Label the operand; reject non-scalar (array) operand types.
   *   2. If the operand is a class type, try to replace this node with a
   *      call to the matching overloaded operator function; on success this
   *      node deletes itself and the new node is checked instead.
   *   3. Otherwise compute the result type via calcNodeType(), inserting a
   *      casting node on the operand when the types differ.
   *   4. Constant-fold when the whole expression is a ready constant.
   * Returns the resolved UTI, Nav on error, or Hzy when not yet resolvable
   * (in which case the resolver is told to make another pass).
   */
  UTI NodeUnaryOp::checkAndLabelType()
  {
    assert(m_node);
    UTI uti = m_node->checkAndLabelType(); //label the operand first

    if(m_state.isComplete(uti) && !m_state.isScalar(uti)) //array unsupported at this time
      {
	std::ostringstream msg;
	msg << "Incompatible (nonscalar) type: ";
	msg << m_state.getUlamTypeNameBriefByIndex(uti).c_str();
	msg << ", for unary " << getName();
	MSG(getNodeLocationAsString().c_str(), msg.str().c_str(), ERR);
	setNodeType(Nav);
	return Nav;
      }

    //replace node with func call to matching function overload operator for class
    // of left, with no arguments for unary (t41???);
    // quark toInt must be used on rhs of operators (t3191, t3200, t3513, t3648,9)
    UlamType * ut = m_state.getUlamTypeByIndex(uti);
    if((ut->getUlamTypeEnum() == Class))
      {
	Node * newnode = buildOperatorOverloadFuncCallNode();
	if(newnode)
	  {
	    //swap the overload-call node into our parent's slot, then delete
	    // this node; the operand has been handed off to newnode.
	    AssertBool swapOk = Node::exchangeNodeWithParent(newnode);
	    assert(swapOk);

	    m_node = NULL; //recycle as memberselect

	    delete this; //suicide is painless..

	    //no member access after this point; newnode is a local copy
	    return newnode->checkAndLabelType();
	  }
	//else should fail again as non-primitive;
      } //done

    UTI newType = Nav;
    if(uti != Nav)
      newType = calcNodeType(uti); //does safety check

    if(m_state.isComplete(newType))
      {
	//insert a cast on the operand when result type differs from operand type
	if(UlamType::compareForMakingCastingNode(newType, uti, m_state) != UTIC_SAME) //not same|dontknow
	  {
	    if(!Node::makeCastingNode(m_node, newType, m_node))
	      newType = Nav; //cast could not be made: error out
	  }
      }
    else
      newType = Hzy; //incomplete: try again on a later resolving pass

    setNodeType(newType);
    if(newType == Hzy) m_state.setGoAgain(); //since not error
    Node::setStoreIntoAble(TBOOL_FALSE); //a unary result is never an lvalue

    //fold now if the whole expression is a compile-time constant
    if((newType != Nav) && isAConstant() && m_node->isReadyConstant())
      return constantFold();

    return newType;
  } //checkAndLabelType