static tree
build_equiv_decl (tree union_type, bool is_init)
{
  tree decl;

  if (is_init)
    {
      decl = gfc_create_var (union_type, "equiv");
      TREE_STATIC (decl) = 1;
      return decl;
    }

  decl = build_decl (VAR_DECL, NULL, union_type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_COMMON (decl) = 1;

  TREE_ADDRESSABLE (decl) = 1;
  TREE_USED (decl) = 1;

  /* The source location has been lost, and doesn't really matter.
     We need to set it to something though.  */
  gfc_set_decl_location (decl, &gfc_current_locus);

  gfc_add_decl_to_function (decl);
  return decl;
}
static tree
build_equiv_decl (tree union_type, bool is_init)
{
  tree decl;
  char name[15];
  static int serial = 0;

  if (is_init)
    {
      decl = gfc_create_var (union_type, "equiv");
      TREE_STATIC (decl) = 1;
      return decl;
    }

  snprintf (name, sizeof (name), "equiv.%d", serial++);
  decl = build_decl (VAR_DECL, get_identifier (name), union_type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;

  if (!gfc_can_put_var_on_stack (DECL_SIZE_UNIT (decl)))
    TREE_STATIC (decl) = 1;

  TREE_ADDRESSABLE (decl) = 1;
  TREE_USED (decl) = 1;

  /* The source location has been lost, and doesn't really matter.
     We need to set it to something though.  */
  gfc_set_decl_location (decl, &gfc_current_locus);

  gfc_add_decl_to_function (decl);
  return decl;
}
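/* Illustrative sketch (not from the original source): a Fortran EQUIVALENCE
   group such as

       INTEGER :: I
       REAL    :: R(2)
       EQUIVALENCE (I, R(1))

   is backed by a single artificial variable of a union type, roughly

       union
       {
         struct { int i; } m0;        // member names here are hypothetical
         struct { float r[2]; } m1;
       } equiv.0;

   build_equiv_decl only creates that backing VAR_DECL for UNION_TYPE; the
   overlapping member layout is built elsewhere.  */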
void
gfc_trans_runtime_check (bool error, bool once, tree cond, stmtblock_t * pblock,
                         locus * where, const char * msgid, ...)
{
  va_list ap;
  stmtblock_t block;
  tree body;
  tree tmp;
  tree tmpvar = NULL;

  if (integer_zerop (cond))
    return;

  if (once)
    {
      tmpvar = gfc_create_var (boolean_type_node, "print_warning");
      TREE_STATIC (tmpvar) = 1;
      DECL_INITIAL (tmpvar) = boolean_true_node;
      gfc_add_expr_to_block (pblock, tmpvar);
    }

  gfc_start_block (&block);

  /* The code to generate the error.  */
  va_start (ap, msgid);
  gfc_add_expr_to_block (&block,
                         trans_runtime_error_vararg (error, where,
                                                     msgid, ap));

  if (once)
    gfc_add_modify (&block, tmpvar, boolean_false_node);

  body = gfc_finish_block (&block);

  if (integer_onep (cond))
    {
      gfc_add_expr_to_block (pblock, body);
    }
  else
    {
      /* Tell the compiler that this isn't likely.  */
      if (once)
        cond = fold_build2_loc (where->lb->location, TRUTH_AND_EXPR,
                                long_integer_type_node, tmpvar, cond);
      else
        cond = fold_convert (long_integer_type_node, cond);

      tmp = build_int_cst (long_integer_type_node, 0);
      cond = build_call_expr_loc (where->lb->location,
                                  built_in_decls[BUILT_IN_EXPECT], 2,
                                  cond, tmp);
      cond = fold_convert (boolean_type_node, cond);

      tmp = fold_build3_loc (where->lb->location, COND_EXPR, void_type_node,
                             cond, body,
                             build_empty_stmt (where->lb->location));
      gfc_add_expr_to_block (pblock, tmp);
    }
}
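/* Rough sketch of the code this emits (illustrative only).  With ONCE set,
   a static flag guards the report so it is issued at most one time:

       static bool print_warning = true;
       ...
       if (__builtin_expect (print_warning && cond, 0))
         {
           <runtime error or warning with MSGID and its arguments>;
           print_warning = false;
         }

   Without ONCE the guard variable and the assignment to it are omitted, and
   a compile-time-true COND inserts the report unconditionally.  */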
/* Reallocate MEM so it has SIZE bytes of data.  This behaves like the
   following pseudo-code:

    void *
    internal_realloc (void *mem, size_t size)
    {
      if (size < 0)
        runtime_error ("Attempt to allocate a negative amount of memory.");
      res = realloc (mem, size);
      if (!res && size != 0)
        _gfortran_os_error ("Out of memory");

      if (size == 0)
        return NULL;

      return res;
    }  */
tree
gfc_call_realloc (stmtblock_t * block, tree mem, tree size)
{
  tree msg, res, negative, nonzero, zero, null_result, tmp;
  tree type = TREE_TYPE (mem);

  size = gfc_evaluate_now (size, block);

  if (TREE_TYPE (size) != TREE_TYPE (size_type_node))
    size = fold_convert (size_type_node, size);

  /* Create a variable to hold the result.  */
  res = gfc_create_var (type, NULL);

  /* size < 0 ?  */
  negative = fold_build2 (LT_EXPR, boolean_type_node, size,
                          build_int_cst (size_type_node, 0));
  msg = gfc_build_addr_expr (pchar_type_node, gfc_build_localized_cstring_const
      ("Attempt to allocate a negative amount of memory."));
  tmp = fold_build3 (COND_EXPR, void_type_node, negative,
                     build_call_expr_loc (input_location,
                                          gfor_fndecl_runtime_error, 1, msg),
                     build_empty_stmt (input_location));
  gfc_add_expr_to_block (block, tmp);

  /* Call realloc and check the result.  */
  tmp = build_call_expr_loc (input_location,
                             built_in_decls[BUILT_IN_REALLOC], 2,
                             fold_convert (pvoid_type_node, mem), size);
  gfc_add_modify (block, res, fold_convert (type, tmp));
  null_result = fold_build2 (EQ_EXPR, boolean_type_node, res,
                             build_int_cst (pvoid_type_node, 0));
  nonzero = fold_build2 (NE_EXPR, boolean_type_node, size,
                         build_int_cst (size_type_node, 0));
  null_result = fold_build2 (TRUTH_AND_EXPR, boolean_type_node, null_result,
                             nonzero);
  msg = gfc_build_addr_expr (pchar_type_node, gfc_build_localized_cstring_const
                             ("Out of memory"));
  tmp = fold_build3 (COND_EXPR, void_type_node, null_result,
                     build_call_expr_loc (input_location,
                                          gfor_fndecl_os_error, 1, msg),
                     build_empty_stmt (input_location));
  gfc_add_expr_to_block (block, tmp);

  /* if (size == 0) then the result is NULL.  */
  tmp = fold_build2 (MODIFY_EXPR, type, res, build_int_cst (type, 0));
  zero = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, nonzero);
  tmp = fold_build3 (COND_EXPR, void_type_node, zero, tmp,
                     build_empty_stmt (input_location));
  gfc_add_expr_to_block (block, tmp);

  return res;
}
tree
gfc_evaluate_now (tree expr, stmtblock_t * pblock)
{
  tree var;

  if (CONSTANT_CLASS_P (expr))
    return expr;

  var = gfc_create_var (TREE_TYPE (expr), NULL);
  gfc_add_modify_expr (pblock, var, expr);

  return var;
}
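/* Illustrative note (not from the original source): callers use this to force
   a possibly side-effecting expression into a temporary so later references
   evaluate it exactly once, e.g. as in the allocation helpers here:

       size = gfc_evaluate_now (size, block);
       // 'size' is now a VAR_DECL; reusing it in the malloc call and in the
       // size != 0 test does not re-evaluate the original expression.
*/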
/* Call malloc to allocate size bytes of memory, with special conditions:
      + if size == 0, return a malloced area of size 1,
      + if malloc returns NULL, issue a runtime error.  */
tree
gfc_call_malloc (stmtblock_t * block, tree type, tree size)
{
  tree tmp, msg, malloc_result, null_result, res, malloc_tree;
  stmtblock_t block2;

  size = gfc_evaluate_now (size, block);

  if (TREE_TYPE (size) != TREE_TYPE (size_type_node))
    size = fold_convert (size_type_node, size);

  /* Create a variable to hold the result.  */
  res = gfc_create_var (prvoid_type_node, NULL);

  /* Call malloc.  */
  gfc_start_block (&block2);

  size = fold_build2_loc (input_location, MAX_EXPR, size_type_node, size,
                          build_int_cst (size_type_node, 1));

  malloc_tree = builtin_decl_explicit (BUILT_IN_MALLOC);
  gfc_add_modify (&block2, res,
                  fold_convert (prvoid_type_node,
                                build_call_expr_loc (input_location,
                                                     malloc_tree, 1, size)));

  /* Optionally check whether malloc was successful.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_MEM)
    {
      null_result = fold_build2_loc (input_location, EQ_EXPR,
                                     boolean_type_node, res,
                                     build_int_cst (pvoid_type_node, 0));
      msg = gfc_build_addr_expr (pchar_type_node,
              gfc_build_localized_cstring_const ("Memory allocation failed"));
      tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
                             null_result,
                             build_call_expr_loc (input_location,
                                                  gfor_fndecl_os_error,
                                                  1, msg),
                             build_empty_stmt (input_location));
      gfc_add_expr_to_block (&block2, tmp);
    }

  malloc_result = gfc_finish_block (&block2);
  gfc_add_expr_to_block (block, malloc_result);

  if (type != NULL)
    res = fold_convert (type, res);
  return res;
}
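/* Rough pseudo-code for the sequence generated above (illustrative only, in
   the same style as the comments on gfc_call_realloc):

       void *
       internal_malloc (size_t size)
       {
         void *res = malloc (MAX (size, 1));
         // The NULL check is emitted only when GFC_RTCHECK_MEM run-time
         // checking is enabled:
         if (res == NULL)
           _gfortran_os_error ("Memory allocation failed");
         return res;
       }
*/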
/* Reallocate MEM so it has SIZE bytes of data.  This behaves like the
   following pseudo-code:

    void *
    internal_realloc (void *mem, size_t size)
    {
      res = realloc (mem, size);
      if (!res && size != 0)
        _gfortran_os_error ("Allocation would exceed memory limit");

      if (size == 0)
        return NULL;

      return res;
    }  */
tree
gfc_call_realloc (stmtblock_t * block, tree mem, tree size)
{
  tree msg, res, nonzero, zero, null_result, tmp;
  tree type = TREE_TYPE (mem);

  size = gfc_evaluate_now (size, block);

  if (TREE_TYPE (size) != TREE_TYPE (size_type_node))
    size = fold_convert (size_type_node, size);

  /* Create a variable to hold the result.  */
  res = gfc_create_var (type, NULL);

  /* Call realloc and check the result.  */
  tmp = build_call_expr_loc (input_location,
                             builtin_decl_explicit (BUILT_IN_REALLOC), 2,
                             fold_convert (pvoid_type_node, mem), size);
  gfc_add_modify (block, res, fold_convert (type, tmp));
  null_result = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
                                 res, build_int_cst (pvoid_type_node, 0));
  nonzero = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, size,
                             build_int_cst (size_type_node, 0));
  null_result = fold_build2_loc (input_location, TRUTH_AND_EXPR,
                                 boolean_type_node, null_result, nonzero);
  msg = gfc_build_addr_expr (pchar_type_node, gfc_build_localized_cstring_const
                             ("Allocation would exceed memory limit"));
  tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
                         null_result,
                         build_call_expr_loc (input_location,
                                              gfor_fndecl_os_error, 1, msg),
                         build_empty_stmt (input_location));
  gfc_add_expr_to_block (block, tmp);

  /* if (size == 0) then the result is NULL.  */
  tmp = fold_build2_loc (input_location, MODIFY_EXPR, type, res,
                         build_int_cst (type, 0));
  zero = fold_build1_loc (input_location, TRUTH_NOT_EXPR, boolean_type_node,
                          nonzero);
  tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, zero, tmp,
                         build_empty_stmt (input_location));
  gfc_add_expr_to_block (block, tmp);

  return res;
}
static tree
gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
                  gfc_omp_clauses *do_clauses)
{
  gfc_se se;
  tree dovar, stmt, from, to, step, type, init, cond, incr;
  tree count = NULL_TREE, cycle_label, tmp, omp_clauses;
  stmtblock_t block;
  stmtblock_t body;
  int simple = 0;
  bool dovar_found = false;
  gfc_omp_clauses *clauses = code->ext.omp_clauses;

  code = code->block->next;
  gcc_assert (code->op == EXEC_DO);

  if (pblock == NULL)
    {
      gfc_start_block (&block);
      pblock = &block;
    }

  omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc);
  if (clauses)
    {
      gfc_namelist *n;
      for (n = clauses->lists[OMP_LIST_LASTPRIVATE]; n != NULL; n = n->next)
        if (code->ext.iterator->var->symtree->n.sym == n->sym)
          break;
      if (n == NULL)
        for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
          if (code->ext.iterator->var->symtree->n.sym == n->sym)
            break;
      if (n != NULL)
        dovar_found = true;
    }

  /* Evaluate all the expressions in the iterator.  */
  gfc_init_se (&se, NULL);
  gfc_conv_expr_lhs (&se, code->ext.iterator->var);
  gfc_add_block_to_block (pblock, &se.pre);
  dovar = se.expr;
  type = TREE_TYPE (dovar);
  gcc_assert (TREE_CODE (type) == INTEGER_TYPE);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->start);
  gfc_add_block_to_block (pblock, &se.pre);
  from = gfc_evaluate_now (se.expr, pblock);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->end);
  gfc_add_block_to_block (pblock, &se.pre);
  to = gfc_evaluate_now (se.expr, pblock);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->step);
  gfc_add_block_to_block (pblock, &se.pre);
  step = gfc_evaluate_now (se.expr, pblock);

  /* Special case simple loops.  */
  if (integer_onep (step))
    simple = 1;
  else if (tree_int_cst_equal (step, integer_minus_one_node))
    simple = -1;

  /* Loop body.  */
  if (simple)
    {
      init = build2_v (MODIFY_EXPR, dovar, from);
      cond = build2 (simple > 0 ? LE_EXPR : GE_EXPR, boolean_type_node,
                     dovar, to);
      incr = fold_build2 (PLUS_EXPR, type, dovar, step);
      incr = fold_build2 (MODIFY_EXPR, type, dovar, incr);
      if (pblock != &block)
        {
          pushlevel (0);
          gfc_start_block (&block);
        }
      gfc_start_block (&body);
    }
  else
    {
      /* STEP is not 1 or -1.  Use:
         for (count = 0; count < (to + step - from) / step; count++)
           {
             dovar = from + count * step;
             body;
           cycle_label:;
           }  */
      tmp = fold_build2 (MINUS_EXPR, type, step, from);
      tmp = fold_build2 (PLUS_EXPR, type, to, tmp);
      tmp = fold_build2 (TRUNC_DIV_EXPR, type, tmp, step);
      tmp = gfc_evaluate_now (tmp, pblock);
      count = gfc_create_var (type, "count");
      init = build2_v (MODIFY_EXPR, count, build_int_cst (type, 0));
      cond = build2 (LT_EXPR, boolean_type_node, count, tmp);
      incr = fold_build2 (PLUS_EXPR, type, count, build_int_cst (type, 1));
      incr = fold_build2 (MODIFY_EXPR, type, count, incr);

      if (pblock != &block)
        {
          pushlevel (0);
          gfc_start_block (&block);
        }
      gfc_start_block (&body);

      /* Initialize DOVAR.  */
      tmp = fold_build2 (MULT_EXPR, type, count, step);
      tmp = build2 (PLUS_EXPR, type, from, tmp);
      gfc_add_modify_expr (&body, dovar, tmp);
    }

  if (!dovar_found)
    {
      tmp = build_omp_clause (OMP_CLAUSE_PRIVATE);
      OMP_CLAUSE_DECL (tmp) = dovar;
      omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
    }
  if (!simple)
    {
      tmp = build_omp_clause (OMP_CLAUSE_PRIVATE);
      OMP_CLAUSE_DECL (tmp) = count;
      omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
    }

  /* Cycle statement is implemented with a goto.  Exit statement must not be
     present for this loop.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);

  /* Put these labels where they can be found later.  We put the
     labels in a TREE_LIST node (because TREE_CHAIN is already used).
     cycle_label goes in TREE_PURPOSE (backend_decl), exit label in
     TREE_VALUE (backend_decl).  */
  code->block->backend_decl = tree_cons (cycle_label, NULL, NULL);

  /* Main loop body.  */
  tmp = gfc_trans_omp_code (code->block->next, true);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* End of loop body.  */
  stmt = make_node (OMP_FOR);

  TREE_TYPE (stmt) = void_type_node;
  OMP_FOR_BODY (stmt) = gfc_finish_block (&body);
  OMP_FOR_CLAUSES (stmt) = omp_clauses;
  OMP_FOR_INIT (stmt) = init;
  OMP_FOR_COND (stmt) = cond;
  OMP_FOR_INCR (stmt) = incr;
  gfc_add_expr_to_block (&block, stmt);

  return gfc_finish_block (&block);
}
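/* Illustrative sketch of what the simple case (step == 1 or -1) produces,
   using the names of the generated temporaries rather than literal output:

       #pragma omp for <clauses>
       for (dovar = from; dovar <= to; dovar = dovar + step)   // >= for step -1
         {
           ... loop body ...
         cycle_label:;
         }

   For any other step value the count-based form shown in the comment inside
   the function is emitted instead, with COUNT added as a private clause.  */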
static tree
gfc_trans_omp_atomic (gfc_code *code)
{
  gfc_se lse;
  gfc_se rse;
  gfc_expr *expr2, *e;
  gfc_symbol *var;
  stmtblock_t block;
  tree lhsaddr, type, rhs, x;
  enum tree_code op = ERROR_MARK;
  bool var_on_left = false;

  code = code->block->next;
  gcc_assert (code->op == EXEC_ASSIGN);
  gcc_assert (code->next == NULL);
  var = code->expr->symtree->n.sym;

  gfc_init_se (&lse, NULL);
  gfc_init_se (&rse, NULL);
  gfc_start_block (&block);

  gfc_conv_expr (&lse, code->expr);
  gfc_add_block_to_block (&block, &lse.pre);
  type = TREE_TYPE (lse.expr);
  lhsaddr = gfc_build_addr_expr (NULL, lse.expr);

  expr2 = code->expr2;
  if (expr2->expr_type == EXPR_FUNCTION
      && expr2->value.function.isym->generic_id == GFC_ISYM_CONVERSION)
    expr2 = expr2->value.function.actual->expr;

  if (expr2->expr_type == EXPR_OP)
    {
      gfc_expr *e;
      switch (expr2->value.op.operator)
        {
        case INTRINSIC_PLUS:
          op = PLUS_EXPR;
          break;
        case INTRINSIC_TIMES:
          op = MULT_EXPR;
          break;
        case INTRINSIC_MINUS:
          op = MINUS_EXPR;
          break;
        case INTRINSIC_DIVIDE:
          if (expr2->ts.type == BT_INTEGER)
            op = TRUNC_DIV_EXPR;
          else
            op = RDIV_EXPR;
          break;
        case INTRINSIC_AND:
          op = TRUTH_ANDIF_EXPR;
          break;
        case INTRINSIC_OR:
          op = TRUTH_ORIF_EXPR;
          break;
        case INTRINSIC_EQV:
          op = EQ_EXPR;
          break;
        case INTRINSIC_NEQV:
          op = NE_EXPR;
          break;
        default:
          gcc_unreachable ();
        }
      e = expr2->value.op.op1;
      if (e->expr_type == EXPR_FUNCTION
          && e->value.function.isym->generic_id == GFC_ISYM_CONVERSION)
        e = e->value.function.actual->expr;
      if (e->expr_type == EXPR_VARIABLE
          && e->symtree != NULL
          && e->symtree->n.sym == var)
        {
          expr2 = expr2->value.op.op2;
          var_on_left = true;
        }
      else
        {
          e = expr2->value.op.op2;
          if (e->expr_type == EXPR_FUNCTION
              && e->value.function.isym->generic_id == GFC_ISYM_CONVERSION)
            e = e->value.function.actual->expr;
          gcc_assert (e->expr_type == EXPR_VARIABLE
                      && e->symtree != NULL
                      && e->symtree->n.sym == var);
          expr2 = expr2->value.op.op1;
          var_on_left = false;
        }
      gfc_conv_expr (&rse, expr2);
      gfc_add_block_to_block (&block, &rse.pre);
    }
  else
    {
      gcc_assert (expr2->expr_type == EXPR_FUNCTION);
      switch (expr2->value.function.isym->generic_id)
        {
        case GFC_ISYM_MIN:
          op = MIN_EXPR;
          break;
        case GFC_ISYM_MAX:
          op = MAX_EXPR;
          break;
        case GFC_ISYM_IAND:
          op = BIT_AND_EXPR;
          break;
        case GFC_ISYM_IOR:
          op = BIT_IOR_EXPR;
          break;
        case GFC_ISYM_IEOR:
          op = BIT_XOR_EXPR;
          break;
        default:
          gcc_unreachable ();
        }
      e = expr2->value.function.actual->expr;
      gcc_assert (e->expr_type == EXPR_VARIABLE
                  && e->symtree != NULL
                  && e->symtree->n.sym == var);

      gfc_conv_expr (&rse, expr2->value.function.actual->next->expr);
      gfc_add_block_to_block (&block, &rse.pre);
      if (expr2->value.function.actual->next->next != NULL)
        {
          tree accum = gfc_create_var (TREE_TYPE (rse.expr), NULL);
          gfc_actual_arglist *arg;

          gfc_add_modify_expr (&block, accum, rse.expr);
          for (arg = expr2->value.function.actual->next->next; arg;
               arg = arg->next)
            {
              gfc_init_block (&rse.pre);
              gfc_conv_expr (&rse, arg->expr);
              gfc_add_block_to_block (&block, &rse.pre);
              x = fold_build2 (op, TREE_TYPE (accum), accum, rse.expr);
              gfc_add_modify_expr (&block, accum, x);
            }

          rse.expr = accum;
        }

      expr2 = expr2->value.function.actual->next->expr;
    }

  lhsaddr = save_expr (lhsaddr);
  rhs = gfc_evaluate_now (rse.expr, &block);
  x = convert (TREE_TYPE (rhs), build_fold_indirect_ref (lhsaddr));

  if (var_on_left)
    x = fold_build2 (op, TREE_TYPE (rhs), x, rhs);
  else
    x = fold_build2 (op, TREE_TYPE (rhs), rhs, x);

  if (TREE_CODE (TREE_TYPE (rhs)) == COMPLEX_TYPE
      && TREE_CODE (type) != COMPLEX_TYPE)
    x = build1 (REALPART_EXPR, TREE_TYPE (TREE_TYPE (rhs)), x);

  x = build2_v (OMP_ATOMIC, lhsaddr, convert (type, x));
  gfc_add_expr_to_block (&block, x);

  gfc_add_block_to_block (&block, &lse.pre);
  gfc_add_block_to_block (&block, &rse.pre);

  return gfc_finish_block (&block);
}
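/* Illustrative example (not from the original source): a Fortran atomic
   update such as

       !$OMP ATOMIC
       n = n + k

   reaches this function as the single EXEC_ASSIGN "n = n + k"; the
   translation builds roughly

       OMP_ATOMIC (&n, (typeof_n) (*&n + k));

   i.e. an OMP_ATOMIC node whose operands are the address of the updated
   variable and the new value, with the variable re-read through that saved
   address inside the expression.  */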
/* Generate code for an ALLOCATE statement when the argument is an
   allocatable array.  If the array is currently allocated, it is an
   error to allocate it again.  This function follows the following
   pseudo-code:

    void *
    allocate_array (void *mem, size_t size, integer_type *stat)
    {
      if (mem == NULL)
        return allocate (size, stat);
      else
      {
        if (stat)
        {
          free (mem);
          mem = allocate (size, stat);
          *stat = LIBERROR_ALLOCATION;
          return mem;
        }
        else
          runtime_error ("Attempting to allocate already allocated variable");
      }
    }

    expr must be set to the original expression being allocated for its locus
    and variable name in case a runtime error has to be printed.  */
tree
gfc_allocate_array_with_status (stmtblock_t * block, tree mem, tree size,
                                tree status, gfc_expr* expr)
{
  stmtblock_t alloc_block;
  tree res, tmp, null_mem, alloc, error;
  tree type = TREE_TYPE (mem);

  if (TREE_TYPE (size) != TREE_TYPE (size_type_node))
    size = fold_convert (size_type_node, size);

  /* Create a variable to hold the result.  */
  res = gfc_create_var (type, NULL);
  null_mem = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, mem,
                              build_int_cst (type, 0));

  /* If mem is NULL, we call gfc_allocate_with_status.  */
  gfc_start_block (&alloc_block);
  tmp = gfc_allocate_with_status (&alloc_block, size, status);
  gfc_add_modify (&alloc_block, res, fold_convert (type, tmp));
  alloc = gfc_finish_block (&alloc_block);

  /* Otherwise, we issue a runtime error or set the status variable.  */
  if (expr)
    {
      tree varname;

      gcc_assert (expr->expr_type == EXPR_VARIABLE && expr->symtree);
      varname = gfc_build_cstring_const (expr->symtree->name);
      varname = gfc_build_addr_expr (pchar_type_node, varname);

      error = gfc_trans_runtime_error (true, &expr->where,
                                       "Attempting to allocate already"
                                       " allocated variable '%s'",
                                       varname);
    }
  else
    error = gfc_trans_runtime_error (true, NULL,
                                     "Attempting to allocate already allocated"
                                     " variable");

  if (status != NULL_TREE && !integer_zerop (status))
    {
      tree status_type = TREE_TYPE (TREE_TYPE (status));
      stmtblock_t set_status_block;

      gfc_start_block (&set_status_block);
      tmp = build_call_expr_loc (input_location,
                                 built_in_decls[BUILT_IN_FREE], 1,
                                 fold_convert (pvoid_type_node, mem));
      gfc_add_expr_to_block (&set_status_block, tmp);

      tmp = gfc_allocate_with_status (&set_status_block, size, status);
      gfc_add_modify (&set_status_block, res, fold_convert (type, tmp));

      gfc_add_modify (&set_status_block,
                      fold_build1_loc (input_location, INDIRECT_REF,
                                       status_type, status),
                      build_int_cst (status_type, LIBERROR_ALLOCATION));

      tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
                             status, build_int_cst (status_type, 0));
      error = fold_build3_loc (input_location, COND_EXPR, void_type_node, tmp,
                               error, gfc_finish_block (&set_status_block));
    }

  tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, null_mem,
                         alloc, error);
  gfc_add_expr_to_block (block, tmp);

  return res;
}
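/* Illustrative usage note (not from the original source): for a Fortran
   statement like

       ALLOCATE (a(n), STAT=ierr)

   the caller passes the array's current data pointer as MEM, the computed
   allocation size in bytes as SIZE, and the address of IERR (or NULL_TREE
   when no STAT= is given) as STATUS; EXPR is the allocate-object expression
   for 'a', used only for the error message text and its locus.  */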
/* Allocate memory, using an optional status argument.

   This function follows the following pseudo-code:

    void *
    allocate (size_t size, integer_type* stat)
    {
      void *newmem;

      if (stat)
        *stat = 0;

      newmem = malloc (MAX (size, 1));
      if (newmem == NULL)
      {
        if (stat)
          *stat = LIBERROR_ALLOCATION;
        else
          runtime_error ("Allocation would exceed memory limit");
      }
      return newmem;
    }  */
tree
gfc_allocate_with_status (stmtblock_t * block, tree size, tree status)
{
  stmtblock_t alloc_block;
  tree res, tmp, msg, cond;
  tree status_type = status ? TREE_TYPE (TREE_TYPE (status)) : NULL_TREE;

  /* Evaluate size only once, and make sure it has the right type.  */
  size = gfc_evaluate_now (size, block);
  if (TREE_TYPE (size) != TREE_TYPE (size_type_node))
    size = fold_convert (size_type_node, size);

  /* Create a variable to hold the result.  */
  res = gfc_create_var (prvoid_type_node, NULL);

  /* Set the optional status variable to zero.  */
  if (status != NULL_TREE && !integer_zerop (status))
    {
      tmp = fold_build2_loc (input_location, MODIFY_EXPR, status_type,
                             fold_build1_loc (input_location, INDIRECT_REF,
                                              status_type, status),
                             build_int_cst (status_type, 0));
      tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
                             fold_build2_loc (input_location, NE_EXPR,
                                              boolean_type_node, status,
                                              build_int_cst (TREE_TYPE (status),
                                                             0)),
                             tmp, build_empty_stmt (input_location));
      gfc_add_expr_to_block (block, tmp);
    }

  /* The allocation itself.  */
  gfc_start_block (&alloc_block);
  gfc_add_modify (&alloc_block, res,
                  fold_convert (prvoid_type_node,
                                build_call_expr_loc (input_location,
                                  built_in_decls[BUILT_IN_MALLOC], 1,
                                  fold_build2_loc (input_location, MAX_EXPR,
                                                   size_type_node, size,
                                                   build_int_cst
                                                     (size_type_node, 1)))));

  msg = gfc_build_addr_expr (pchar_type_node, gfc_build_localized_cstring_const
                             ("Allocation would exceed memory limit"));
  tmp = build_call_expr_loc (input_location, gfor_fndecl_os_error, 1, msg);

  if (status != NULL_TREE && !integer_zerop (status))
    {
      /* Set the status variable if it's present.  */
      tree tmp2;

      cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
                              status, build_int_cst (TREE_TYPE (status), 0));
      tmp2 = fold_build2_loc (input_location, MODIFY_EXPR, status_type,
                              fold_build1_loc (input_location, INDIRECT_REF,
                                               status_type, status),
                              build_int_cst (status_type,
                                             LIBERROR_ALLOCATION));
      tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, cond,
                             tmp, tmp2);
    }

  tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
                         fold_build2_loc (input_location, EQ_EXPR,
                                          boolean_type_node, res,
                                          build_int_cst (prvoid_type_node, 0)),
                         tmp, build_empty_stmt (input_location));
  gfc_add_expr_to_block (&alloc_block, tmp);
  gfc_add_expr_to_block (block, gfc_finish_block (&alloc_block));

  return res;
}
static tree
gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
                  gfc_omp_clauses *do_clauses, tree par_clauses)
{
  gfc_se se;
  tree dovar, stmt, from, to, step, type, init, cond, incr;
  tree count = NULL_TREE, cycle_label, tmp, omp_clauses;
  stmtblock_t block;
  stmtblock_t body;
  gfc_omp_clauses *clauses = code->ext.omp_clauses;
  int i, collapse = clauses->collapse;
  tree dovar_init = NULL_TREE;

  if (collapse <= 0)
    collapse = 1;

  code = code->block->next;
  gcc_assert (code->op == EXEC_DO);

  init = make_tree_vec (collapse);
  cond = make_tree_vec (collapse);
  incr = make_tree_vec (collapse);

  if (pblock == NULL)
    {
      gfc_start_block (&block);
      pblock = &block;
    }

  omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc);

  for (i = 0; i < collapse; i++)
    {
      int simple = 0;
      int dovar_found = 0;
      tree dovar_decl;

      if (clauses)
        {
          gfc_namelist *n;
          for (n = clauses->lists[OMP_LIST_LASTPRIVATE]; n != NULL;
               n = n->next)
            if (code->ext.iterator->var->symtree->n.sym == n->sym)
              break;
          if (n != NULL)
            dovar_found = 1;
          else if (n == NULL)
            for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
              if (code->ext.iterator->var->symtree->n.sym == n->sym)
                break;
          if (n != NULL)
            dovar_found++;
        }

      /* Evaluate all the expressions in the iterator.  */
      gfc_init_se (&se, NULL);
      gfc_conv_expr_lhs (&se, code->ext.iterator->var);
      gfc_add_block_to_block (pblock, &se.pre);
      dovar = se.expr;
      type = TREE_TYPE (dovar);
      gcc_assert (TREE_CODE (type) == INTEGER_TYPE);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->ext.iterator->start);
      gfc_add_block_to_block (pblock, &se.pre);
      from = gfc_evaluate_now (se.expr, pblock);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->ext.iterator->end);
      gfc_add_block_to_block (pblock, &se.pre);
      to = gfc_evaluate_now (se.expr, pblock);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->ext.iterator->step);
      gfc_add_block_to_block (pblock, &se.pre);
      step = gfc_evaluate_now (se.expr, pblock);
      dovar_decl = dovar;

      /* Special case simple loops.  */
      if (TREE_CODE (dovar) == VAR_DECL)
        {
          if (integer_onep (step))
            simple = 1;
          else if (tree_int_cst_equal (step, integer_minus_one_node))
            simple = -1;
        }
      else
        dovar_decl
          = gfc_trans_omp_variable (code->ext.iterator->var->symtree->n.sym);

      /* Loop body.  */
      if (simple)
        {
          TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, dovar, from);
          TREE_VEC_ELT (cond, i) = fold_build2 (simple > 0 ? LE_EXPR : GE_EXPR,
                                                boolean_type_node, dovar, to);
          TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, dovar, step);
          TREE_VEC_ELT (incr, i) = fold_build2 (MODIFY_EXPR, type, dovar,
                                                TREE_VEC_ELT (incr, i));
        }
      else
        {
          /* STEP is not 1 or -1.  Use:
             for (count = 0; count < (to + step - from) / step; count++)
               {
                 dovar = from + count * step;
                 body;
               cycle_label:;
               }  */
          tmp = fold_build2 (MINUS_EXPR, type, step, from);
          tmp = fold_build2 (PLUS_EXPR, type, to, tmp);
          tmp = fold_build2 (TRUNC_DIV_EXPR, type, tmp, step);
          tmp = gfc_evaluate_now (tmp, pblock);
          count = gfc_create_var (type, "count");
          TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, count,
                                             build_int_cst (type, 0));
          TREE_VEC_ELT (cond, i) = fold_build2 (LT_EXPR, boolean_type_node,
                                                count, tmp);
          TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, count,
                                                build_int_cst (type, 1));
          TREE_VEC_ELT (incr, i) = fold_build2 (MODIFY_EXPR, type, count,
                                                TREE_VEC_ELT (incr, i));

          /* Initialize DOVAR.  */
          tmp = fold_build2 (MULT_EXPR, type, count, step);
          tmp = fold_build2 (PLUS_EXPR, type, from, tmp);
          dovar_init = tree_cons (dovar, tmp, dovar_init);
        }

      if (!dovar_found)
        {
          tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
          OMP_CLAUSE_DECL (tmp) = dovar_decl;
          omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
        }
      else if (dovar_found == 2)
        {
          tree c = NULL;

          tmp = NULL;
          if (!simple)
            {
              /* If dovar is lastprivate, but different counter is used,
                 dovar += step needs to be added to
                 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
                 will have the value on entry of the last loop, rather
                 than value after iterator increment.  */
              tmp = gfc_evaluate_now (step, pblock);
              tmp = fold_build2 (PLUS_EXPR, type, dovar, tmp);
              tmp = fold_build2 (MODIFY_EXPR, type, dovar, tmp);
              for (c = omp_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
                    && OMP_CLAUSE_DECL (c) == dovar_decl)
                  {
                    OMP_CLAUSE_LASTPRIVATE_STMT (c) = tmp;
                    break;
                  }
            }
          if (c == NULL && par_clauses != NULL)
            {
              for (c = par_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
                    && OMP_CLAUSE_DECL (c) == dovar_decl)
                  {
                    tree l = build_omp_clause (input_location,
                                               OMP_CLAUSE_LASTPRIVATE);
                    OMP_CLAUSE_DECL (l) = dovar_decl;
                    OMP_CLAUSE_CHAIN (l) = omp_clauses;
                    OMP_CLAUSE_LASTPRIVATE_STMT (l) = tmp;
                    omp_clauses = l;
                    OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_SHARED);
                    break;
                  }
            }
          gcc_assert (simple || c != NULL);
        }
      if (!simple)
        {
          tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
          OMP_CLAUSE_DECL (tmp) = count;
          omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
        }

      if (i + 1 < collapse)
        code = code->block->next;
    }

  if (pblock != &block)
    {
      pushlevel (0);
      gfc_start_block (&block);
    }

  gfc_start_block (&body);

  dovar_init = nreverse (dovar_init);
  while (dovar_init)
    {
      gfc_add_modify (&body, TREE_PURPOSE (dovar_init),
                      TREE_VALUE (dovar_init));
      dovar_init = TREE_CHAIN (dovar_init);
    }

  /* Cycle statement is implemented with a goto.  Exit statement must not be
     present for this loop.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);

  /* Put these labels where they can be found later.  We put the
     labels in a TREE_LIST node (because TREE_CHAIN is already used).
     cycle_label goes in TREE_PURPOSE (backend_decl), exit label in
     TREE_VALUE (backend_decl).  */
  code->block->backend_decl = tree_cons (cycle_label, NULL, NULL);

  /* Main loop body.  */
  tmp = gfc_trans_omp_code (code->block->next, true);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* End of loop body.  */
  stmt = make_node (OMP_FOR);

  TREE_TYPE (stmt) = void_type_node;
  OMP_FOR_BODY (stmt) = gfc_finish_block (&body);
  OMP_FOR_CLAUSES (stmt) = omp_clauses;
  OMP_FOR_INIT (stmt) = init;
  OMP_FOR_COND (stmt) = cond;
  OMP_FOR_INCR (stmt) = incr;
  gfc_add_expr_to_block (&block, stmt);

  return gfc_finish_block (&block);
}
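/* Illustrative note (not from the original source): with COLLAPSE (n), INIT,
   COND and INCR are TREE_VECs holding one element per nested DO loop, so e.g.

       !$OMP DO COLLAPSE(2)
       DO i = 1, n
         DO j = 1, m
           ...

   yields a single OMP_FOR whose init/cond/incr vectors carry the i and j
   expressions in positions 0 and 1, and whose OMP_FOR_BODY is the innermost
   loop body.  */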
/* Allocate memory, using an optional status argument.

   This function follows the following pseudo-code:

    void *
    allocate (size_t size, integer_type* stat)
    {
      void *newmem;

      if (stat)
        *stat = 0;

      // The only time this can happen is the size wraps around.
      if (size < 0)
      {
        if (stat)
        {
          *stat = LIBERROR_ALLOCATION;
          newmem = NULL;
        }
        else
          runtime_error ("Attempt to allocate negative amount of memory. "
                         "Possible integer overflow");
      }
      else
      {
        newmem = malloc (MAX (size, 1));
        if (newmem == NULL)
        {
          if (stat)
            *stat = LIBERROR_ALLOCATION;
          else
            runtime_error ("Out of memory");
        }
      }

      return newmem;
    }  */
tree
gfc_allocate_with_status (stmtblock_t * block, tree size, tree status)
{
  stmtblock_t alloc_block;
  tree res, tmp, error, msg, cond;
  tree status_type = status ? TREE_TYPE (TREE_TYPE (status)) : NULL_TREE;

  /* Evaluate size only once, and make sure it has the right type.  */
  size = gfc_evaluate_now (size, block);
  if (TREE_TYPE (size) != TREE_TYPE (size_type_node))
    size = fold_convert (size_type_node, size);

  /* Create a variable to hold the result.  */
  res = gfc_create_var (prvoid_type_node, NULL);

  /* Set the optional status variable to zero.  */
  if (status != NULL_TREE && !integer_zerop (status))
    {
      tmp = fold_build2 (MODIFY_EXPR, status_type,
                         fold_build1 (INDIRECT_REF, status_type, status),
                         build_int_cst (status_type, 0));
      tmp = fold_build3 (COND_EXPR, void_type_node,
                         fold_build2 (NE_EXPR, boolean_type_node, status,
                                      build_int_cst (TREE_TYPE (status), 0)),
                         tmp, build_empty_stmt (input_location));
      gfc_add_expr_to_block (block, tmp);
    }

  /* Generate the block of code handling (size < 0).  */
  msg = gfc_build_addr_expr (pchar_type_node, gfc_build_localized_cstring_const
                             ("Attempt to allocate negative amount of memory. "
                              "Possible integer overflow"));
  error = build_call_expr_loc (input_location,
                               gfor_fndecl_runtime_error, 1, msg);

  if (status != NULL_TREE && !integer_zerop (status))
    {
      /* Set the status variable if it's present.  */
      stmtblock_t set_status_block;

      gfc_start_block (&set_status_block);
      gfc_add_modify (&set_status_block,
                      fold_build1 (INDIRECT_REF, status_type, status),
                      build_int_cst (status_type, LIBERROR_ALLOCATION));
      gfc_add_modify (&set_status_block, res,
                      build_int_cst (prvoid_type_node, 0));

      tmp = fold_build2 (EQ_EXPR, boolean_type_node, status,
                         build_int_cst (TREE_TYPE (status), 0));
      error = fold_build3 (COND_EXPR, void_type_node, tmp, error,
                           gfc_finish_block (&set_status_block));
    }

  /* The allocation itself.  */
  gfc_start_block (&alloc_block);
  gfc_add_modify (&alloc_block, res,
                  fold_convert (prvoid_type_node,
                                build_call_expr_loc (input_location,
                                  built_in_decls[BUILT_IN_MALLOC], 1,
                                  fold_build2 (MAX_EXPR, size_type_node, size,
                                               build_int_cst (size_type_node,
                                                              1)))));

  msg = gfc_build_addr_expr (pchar_type_node, gfc_build_localized_cstring_const
                             ("Out of memory"));
  tmp = build_call_expr_loc (input_location, gfor_fndecl_os_error, 1, msg);

  if (status != NULL_TREE && !integer_zerop (status))
    {
      /* Set the status variable if it's present.  */
      tree tmp2;

      cond = fold_build2 (EQ_EXPR, boolean_type_node, status,
                          build_int_cst (TREE_TYPE (status), 0));
      tmp2 = fold_build2 (MODIFY_EXPR, status_type,
                          fold_build1 (INDIRECT_REF, status_type, status),
                          build_int_cst (status_type, LIBERROR_ALLOCATION));
      tmp = fold_build3 (COND_EXPR, void_type_node, cond, tmp, tmp2);
    }

  tmp = fold_build3 (COND_EXPR, void_type_node,
                     fold_build2 (EQ_EXPR, boolean_type_node, res,
                                  build_int_cst (prvoid_type_node, 0)),
                     tmp, build_empty_stmt (input_location));
  gfc_add_expr_to_block (&alloc_block, tmp);

  cond = fold_build2 (LT_EXPR, boolean_type_node, size,
                      build_int_cst (TREE_TYPE (size), 0));
  tmp = fold_build3 (COND_EXPR, void_type_node, cond, error,
                     gfc_finish_block (&alloc_block));

  gfc_add_expr_to_block (block, tmp);

  return res;
}