static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct pointer_set_t *p_set = (struct pointer_set_t*) data;

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references through. */
      && !(DECL_THUNK_P (current_function_decl)
	   && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
	  htab_find_with_hash (cp_function_chain->extern_decl_map,
			       &in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
	   && TREE_OPERAND (stmt, 0)
	   && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  {
	    *walk_subtrees = 0;
	    if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
	      cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			    cp_genericize_r, p_set, NULL);
	  }
	break;
      case OMP_CLAUSE_PRIVATE:
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	break;
      case OMP_CLAUSE_REDUCTION:
	gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
	break;
      default:
	break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a
     chance to lower this construct before scanning it, so we need to
     lower these before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2 (CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
					     : TRY_FINALLY_EXPR,
		      void_type_node,
		      CLEANUP_BODY (stmt),
		      CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
	= (TREE_OPERAND (stmt, 1)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	   : NULL_TREE);
      tree type_right
	= (TREE_OPERAND (stmt, 2)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	   : NULL_TREE);
      if (type_left
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 1))))
	{
	  TREE_OPERAND (stmt, 1)
	    = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_left));
	}
      if (type_right
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 2))))
	{
	  TREE_OPERAND (stmt, 2)
	    = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_right));
	}
    }

  pointer_set_insert (p_set, *stmt_p);
  return NULL;
}
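/* The function above hinges on two traversal conventions: a node already
   recorded in P_SET is not walked again, and clearing *WALK_SUBTREES tells
   the tree walker not to descend below the current node.  The stand-alone
   sketch below illustrates that visit-once discipline; the `node' type,
   `walk' driver and array-backed set are hypothetical stand-ins for GCC's
   tree, cp_walk_tree and pointer_set_t, not GCC code.  */

#include <stddef.h>
#include <stdio.h>

struct node
{
  const char *name;
  struct node *kids[2];
};

/* Crude pointer set: a linear array standing in for pointer_set_t.  */
static const struct node *seen[32];
static size_t n_seen;

static int
seen_contains (const struct node *n)
{
  size_t i;
  for (i = 0; i < n_seen; i++)
    if (seen[i] == n)
      return 1;
  return 0;
}

/* Pre-order walk; the callback may clear *WALK_SUBTREES to prune,
   mirroring the contract cp_genericize_r relies on.  */
static void
walk (struct node *n, void (*cb) (struct node *, int *))
{
  int walk_subtrees = 1;
  int i;
  if (!n)
    return;
  cb (n, &walk_subtrees);
  if (walk_subtrees)
    for (i = 0; i < 2; i++)
      walk (n->kids[i], cb);
}

static void
visit_once (struct node *n, int *walk_subtrees)
{
  if (seen_contains (n))
    {
      /* Already handled: don't recurse into it again.  */
      *walk_subtrees = 0;
      return;
    }
  seen[n_seen++] = n;
  printf ("visiting %s\n", n->name);
}

int
main (void)
{
  struct node shared = { "shared", { NULL, NULL } };
  struct node a = { "a", { &shared, NULL } };
  struct node root = { "root", { &a, &shared } };

  /* `shared' is reachable twice but is visited only once.  */
  walk (&root, visit_once);
  return 0;
}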
static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  struct pointer_set_t *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references through. */
      && !(DECL_THUNK_P (current_function_decl)
	   && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
	  htab_find_with_hash (cp_function_chain->extern_decl_map,
			       &in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      /* If in an OpenMP context, note var uses.  */
      if (__builtin_expect (wtd->omp_ctx != NULL, 0)
	  && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
	   && TREE_OPERAND (stmt, 0)
	   && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  {
	    *walk_subtrees = 0;
	    if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
	      cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			    cp_genericize_r, data, NULL);
	  }
	break;
      case OMP_CLAUSE_PRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	else if (wtd->omp_ctx != NULL)
	  {
	    /* Private clause doesn't cause any references to the
	       var in outer contexts, avoid calling
	       omp_cxx_notice_variable for it.  */
	    struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	    wtd->omp_ctx = NULL;
	    cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			  data, NULL);
	    wtd->omp_ctx = old;
	    *walk_subtrees = 0;
	  }
	break;
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	break;
      case OMP_CLAUSE_REDUCTION:
	/* Don't dereference an invisiref in reduction clause's
	   OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
	   still need to be genericized.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  {
	    *walk_subtrees = 0;
	    if (OMP_CLAUSE_REDUCTION_INIT (stmt))
	      cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
			    cp_genericize_r, data, NULL);
	    if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
	      cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
			    cp_genericize_r, data, NULL);
	  }
	break;
      default:
	break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a
     chance to lower this construct before scanning it, so we need to
     lower these before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2_loc (EXPR_LOCATION (stmt),
			  CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
						 : TRY_FINALLY_EXPR,
			  void_type_node,
			  CLEANUP_BODY (stmt),
			  CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
	= (TREE_OPERAND (stmt, 1)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	   : NULL_TREE);
      tree type_right
	= (TREE_OPERAND (stmt, 2)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	   : NULL_TREE);
      if (type_left
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 1))))
	{
	  TREE_OPERAND (stmt, 1)
	    = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_left));
	}
      if (type_right
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 2))))
	{
	  TREE_OPERAND (stmt, 2)
	    = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_right));
	}
    }
  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
	{
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	 BLOCK, and append an IMPORTED_DECL to its BLOCK_VARS
	 chained list.  */
      if (wtd->bind_expr_stack.exists ())
	{
	  int i;
	  for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
	    if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
	      break;
	}
      if (block)
	{
	  tree using_directive;
	  gcc_assert (TREE_OPERAND (stmt, 0));

	  using_directive = make_node (IMPORTED_DECL);
	  TREE_TYPE (using_directive) = void_type_node;

	  IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
	    = TREE_OPERAND (stmt, 0);
	  DECL_CHAIN (using_directive) = BLOCK_VARS (block);
	  BLOCK_VARS (block) = using_directive;
	}
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
	   && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
    {
      struct cp_genericize_omp_taskreg omp_ctx;
      tree c, decl;
      splay_tree_node n;

      *walk_subtrees = 0;
      cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
      omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
      omp_ctx.default_shared = omp_ctx.is_parallel;
      omp_ctx.outer = wtd->omp_ctx;
      omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
      wtd->omp_ctx = &omp_ctx;
      for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	  case OMP_CLAUSE_SHARED:
	  case OMP_CLAUSE_PRIVATE:
	  case OMP_CLAUSE_FIRSTPRIVATE:
	  case OMP_CLAUSE_LASTPRIVATE:
	    decl = OMP_CLAUSE_DECL (c);
	    if (decl == error_mark_node || !omp_var_to_track (decl))
	      break;
	    n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	    if (n != NULL)
	      break;
	    splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
			       ? OMP_CLAUSE_DEFAULT_SHARED
			       : OMP_CLAUSE_DEFAULT_PRIVATE);
	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
		&& omp_ctx.outer)
	      omp_cxx_notice_variable (omp_ctx.outer, decl);
	    break;
	  case OMP_CLAUSE_DEFAULT:
	    if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
	      omp_ctx.default_shared = true;
	  default:
	    break;
	  }
      cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
      wtd->omp_ctx = omp_ctx.outer;
      splay_tree_delete (omp_ctx.variables);
    }
  else if (TREE_CODE (stmt) == CONVERT_EXPR)
    gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
  else if (TREE_CODE (stmt) == FOR_STMT)
    genericize_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == WHILE_STMT)
    genericize_while_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == DO_STMT)
    genericize_do_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == SWITCH_STMT)
    genericize_switch_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == CONTINUE_STMT)
    genericize_continue_stmt (stmt_p);
  else if (TREE_CODE (stmt) == BREAK_STMT)
    genericize_break_stmt (stmt_p);
  else if (TREE_CODE (stmt) == OMP_FOR
	   || TREE_CODE (stmt) == OMP_SIMD
	   || TREE_CODE (stmt) == OMP_DISTRIBUTE)
    genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == SIZEOF_EXPR)
    {
      if (SIZEOF_EXPR_TYPE_P (stmt))
	*stmt_p
	  = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt, 0)),
					SIZEOF_EXPR, false);
      else if (TYPE_P (TREE_OPERAND (stmt, 0)))
	*stmt_p = cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt, 0),
					      SIZEOF_EXPR, false);
      else
	*stmt_p = cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt, 0),
					      SIZEOF_EXPR, false);
      if (*stmt_p == error_mark_node)
	*stmt_p = size_one_node;
      return NULL;
    }

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}
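/* One detail of the OMP_PARALLEL/OMP_TASK arm above is that the new
   cp_genericize_omp_taskreg context is a local variable chained to the
   enclosing context through `outer' and unchained once the region body has
   been walked.  Below is a self-contained sketch of that
   push/pop-by-stack-allocation pattern; `ctx', `walk_data' and
   `enter_region' are invented names for illustration, not GCC API.  */

#include <stdio.h>

struct ctx
{
  int is_parallel;
  struct ctx *outer;
};

struct walk_data
{
  struct ctx *cur;
};

static void
enter_region (struct walk_data *wd, int is_parallel, int depth)
{
  struct ctx c;			/* Stack-allocated, like omp_ctx above.  */
  struct ctx *p;
  int n = 0;

  c.is_parallel = is_parallel;
  c.outer = wd->cur;		/* Chain to the enclosing region.  */
  wd->cur = &c;			/* Push.  */

  for (p = wd->cur; p; p = p->outer)
    n++;
  printf ("depth %d: %s region, nesting level %d\n",
	  depth, is_parallel ? "parallel" : "task", n);

  if (depth < 3)
    enter_region (wd, !is_parallel, depth + 1);	/* Walk a nested body.  */

  wd->cur = c.outer;		/* Pop on the way out.  */
}

int
main (void)
{
  struct walk_data wd = { NULL };
  enter_region (&wd, 1, 1);
  return 0;
}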
static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  struct pointer_set_t *p_set = wtd->p_set;

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references through. */
      && !(DECL_THUNK_P (current_function_decl)
	   && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
	  htab_find_with_hash (cp_function_chain->extern_decl_map,
			       &in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
	   && TREE_OPERAND (stmt, 0)
	   && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  {
	    *walk_subtrees = 0;
	    if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
	      cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			    cp_genericize_r, data, NULL);
	  }
	break;
      case OMP_CLAUSE_PRIVATE:
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	break;
      case OMP_CLAUSE_REDUCTION:
	gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
	break;
      default:
	break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a
     chance to lower this construct before scanning it, so we need to
     lower these before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2 (CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
					     : TRY_FINALLY_EXPR,
		      void_type_node,
		      CLEANUP_BODY (stmt),
		      CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
	= (TREE_OPERAND (stmt, 1)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	   : NULL_TREE);
      tree type_right
	= (TREE_OPERAND (stmt, 2)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	   : NULL_TREE);
      if (type_left
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 1))))
	{
	  TREE_OPERAND (stmt, 1)
	    = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_left));
	}
      if (type_right
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 2))))
	{
	  TREE_OPERAND (stmt, 2)
	    = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_right));
	}
    }
  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      VEC_safe_push (tree, heap, wtd->bind_expr_stack, stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      VEC_pop (tree, wtd->bind_expr_stack);
    }
  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	 BLOCK, and append an IMPORTED_DECL to its BLOCK_VARS
	 chained list.  */
      if (wtd->bind_expr_stack)
	{
	  int i;
	  for (i = VEC_length (tree, wtd->bind_expr_stack) - 1; i >= 0; i--)
	    if ((block = BIND_EXPR_BLOCK (VEC_index (tree,
						     wtd->bind_expr_stack,
						     i))))
	      break;
	}
      if (block)
	{
	  tree using_directive;
	  gcc_assert (TREE_OPERAND (stmt, 0));

	  using_directive = make_node (IMPORTED_DECL);
	  TREE_TYPE (using_directive) = void_type_node;

	  IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
	    = TREE_OPERAND (stmt, 0);
	  DECL_CHAIN (using_directive) = BLOCK_VARS (block);
	  BLOCK_VARS (block) = using_directive;
	}
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == DECL_EXPR
	   && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}
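/* The USING_STMT arm above scans the bind-expression stack from the
   innermost entry outward and attaches the IMPORTED_DECL to the first
   scope that carries a BLOCK.  The following toy program shows just that
   innermost-first search; the `scope' struct and its `has_block' flag are
   hypothetical stand-ins for BIND_EXPRs and their BLOCKs, not GCC code.  */

#include <stdio.h>

struct scope
{
  const char *name;
  int has_block;	/* Stands in for BIND_EXPR_BLOCK being non-NULL.  */
};

int
main (void)
{
  struct scope stack[] = {
    { "function body", 1 },
    { "artificial bind", 0 },
    { "innermost bind", 0 }
  };
  int len = sizeof stack / sizeof stack[0];
  struct scope *found = NULL;
  int i;

  /* Innermost entry first, like the loop over wtd->bind_expr_stack.  */
  for (i = len - 1; i >= 0; i--)
    if (stack[i].has_block)
      {
	found = &stack[i];
	break;
      }

  if (found)
    printf ("IMPORTED_DECL would be appended to: %s\n", found->name);
  return 0;
}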
static tree
gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
		  gfc_omp_clauses *do_clauses, tree par_clauses)
{
  gfc_se se;
  tree dovar, stmt, from, to, step, type, init, cond, incr;
  tree count = NULL_TREE, cycle_label, tmp, omp_clauses;
  stmtblock_t block;
  stmtblock_t body;
  gfc_omp_clauses *clauses = code->ext.omp_clauses;
  int i, collapse = clauses->collapse;
  tree dovar_init = NULL_TREE;

  if (collapse <= 0)
    collapse = 1;

  code = code->block->next;
  gcc_assert (code->op == EXEC_DO);

  init = make_tree_vec (collapse);
  cond = make_tree_vec (collapse);
  incr = make_tree_vec (collapse);

  if (pblock == NULL)
    {
      gfc_start_block (&block);
      pblock = &block;
    }

  omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc);

  for (i = 0; i < collapse; i++)
    {
      int simple = 0;
      int dovar_found = 0;
      tree dovar_decl;

      if (clauses)
	{
	  gfc_namelist *n;
	  for (n = clauses->lists[OMP_LIST_LASTPRIVATE]; n != NULL;
	       n = n->next)
	    if (code->ext.iterator->var->symtree->n.sym == n->sym)
	      break;
	  if (n != NULL)
	    dovar_found = 1;
	  else if (n == NULL)
	    for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
	      if (code->ext.iterator->var->symtree->n.sym == n->sym)
		break;
	  if (n != NULL)
	    dovar_found++;
	}

      /* Evaluate all the expressions in the iterator.  */
      gfc_init_se (&se, NULL);
      gfc_conv_expr_lhs (&se, code->ext.iterator->var);
      gfc_add_block_to_block (pblock, &se.pre);
      dovar = se.expr;
      type = TREE_TYPE (dovar);
      gcc_assert (TREE_CODE (type) == INTEGER_TYPE);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->ext.iterator->start);
      gfc_add_block_to_block (pblock, &se.pre);
      from = gfc_evaluate_now (se.expr, pblock);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->ext.iterator->end);
      gfc_add_block_to_block (pblock, &se.pre);
      to = gfc_evaluate_now (se.expr, pblock);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->ext.iterator->step);
      gfc_add_block_to_block (pblock, &se.pre);
      step = gfc_evaluate_now (se.expr, pblock);
      dovar_decl = dovar;

      /* Special case simple loops.  */
      if (TREE_CODE (dovar) == VAR_DECL)
	{
	  if (integer_onep (step))
	    simple = 1;
	  else if (tree_int_cst_equal (step, integer_minus_one_node))
	    simple = -1;
	}
      else
	dovar_decl
	  = gfc_trans_omp_variable (code->ext.iterator->var->symtree->n.sym);

      /* Loop body.  */
      if (simple)
	{
	  TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, dovar, from);
	  TREE_VEC_ELT (cond, i) = fold_build2 (simple > 0 ? LE_EXPR : GE_EXPR,
						boolean_type_node, dovar, to);
	  TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, dovar, step);
	  TREE_VEC_ELT (incr, i) = fold_build2 (MODIFY_EXPR, type, dovar,
						TREE_VEC_ELT (incr, i));
	}
      else
	{
	  /* STEP is not 1 or -1.  Use:
	     for (count = 0; count < (to + step - from) / step; count++)
	       {
		 dovar = from + count * step;
		 body;
	       cycle_label:;
	       }  */
	  tmp = fold_build2 (MINUS_EXPR, type, step, from);
	  tmp = fold_build2 (PLUS_EXPR, type, to, tmp);
	  tmp = fold_build2 (TRUNC_DIV_EXPR, type, tmp, step);
	  tmp = gfc_evaluate_now (tmp, pblock);
	  count = gfc_create_var (type, "count");
	  TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, count,
					     build_int_cst (type, 0));
	  TREE_VEC_ELT (cond, i) = fold_build2 (LT_EXPR, boolean_type_node,
						count, tmp);
	  TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, count,
						build_int_cst (type, 1));
	  TREE_VEC_ELT (incr, i) = fold_build2 (MODIFY_EXPR, type,
						count, TREE_VEC_ELT (incr, i));

	  /* Initialize DOVAR.  */
	  tmp = fold_build2 (MULT_EXPR, type, count, step);
	  tmp = fold_build2 (PLUS_EXPR, type, from, tmp);
	  dovar_init = tree_cons (dovar, tmp, dovar_init);
	}

      if (!dovar_found)
	{
	  tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
	  OMP_CLAUSE_DECL (tmp) = dovar_decl;
	  omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
	}
      else if (dovar_found == 2)
	{
	  tree c = NULL;

	  tmp = NULL;
	  if (!simple)
	    {
	      /* If dovar is lastprivate, but different counter is used,
		 dovar += step needs to be added to
		 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
		 will have the value on entry of the last loop, rather
		 than value after iterator increment.  */
	      tmp = gfc_evaluate_now (step, pblock);
	      tmp = fold_build2 (PLUS_EXPR, type, dovar, tmp);
	      tmp = fold_build2 (MODIFY_EXPR, type, dovar, tmp);
	      for (c = omp_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		    && OMP_CLAUSE_DECL (c) == dovar_decl)
		  {
		    OMP_CLAUSE_LASTPRIVATE_STMT (c) = tmp;
		    break;
		  }
	    }
	  if (c == NULL && par_clauses != NULL)
	    {
	      for (c = par_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		    && OMP_CLAUSE_DECL (c) == dovar_decl)
		  {
		    tree l = build_omp_clause (input_location,
					       OMP_CLAUSE_LASTPRIVATE);
		    OMP_CLAUSE_DECL (l) = dovar_decl;
		    OMP_CLAUSE_CHAIN (l) = omp_clauses;
		    OMP_CLAUSE_LASTPRIVATE_STMT (l) = tmp;
		    omp_clauses = l;
		    OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_SHARED);
		    break;
		  }
	    }
	  gcc_assert (simple || c != NULL);
	}
      if (!simple)
	{
	  tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
	  OMP_CLAUSE_DECL (tmp) = count;
	  omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
	}

      if (i + 1 < collapse)
	code = code->block->next;
    }

  if (pblock != &block)
    {
      pushlevel (0);
      gfc_start_block (&block);
    }

  gfc_start_block (&body);

  dovar_init = nreverse (dovar_init);
  while (dovar_init)
    {
      gfc_add_modify (&body, TREE_PURPOSE (dovar_init),
		      TREE_VALUE (dovar_init));
      dovar_init = TREE_CHAIN (dovar_init);
    }

  /* Cycle statement is implemented with a goto.  Exit statement must not
     be present for this loop.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);

  /* Put these labels where they can be found later.  We put the
     labels in a TREE_LIST node (because TREE_CHAIN is already
     used).  cycle_label goes in TREE_PURPOSE (backend_decl), exit
     label in TREE_VALUE (backend_decl).  */
  code->block->backend_decl = tree_cons (cycle_label, NULL, NULL);

  /* Main loop body.  */
  tmp = gfc_trans_omp_code (code->block->next, true);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* End of loop body.  */
  stmt = make_node (OMP_FOR);

  TREE_TYPE (stmt) = void_type_node;
  OMP_FOR_BODY (stmt) = gfc_finish_block (&body);
  OMP_FOR_CLAUSES (stmt) = omp_clauses;
  OMP_FOR_INIT (stmt) = init;
  OMP_FOR_COND (stmt) = cond;
  OMP_FOR_INCR (stmt) = incr;
  gfc_add_expr_to_block (&block, stmt);

  return gfc_finish_block (&block);
}
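/* The non-unit-stride rewrite above can be checked with plain integer
   arithmetic: the trip count is (to + step - from) / step with truncating
   division (TRUNC_DIV_EXPR), and the loop variable is recomputed as
   from + count * step on every iteration.  The small driver below is an
   illustration with a sample from/to/step, not part of GCC; it compares
   the rewritten loop against the naive one.  */

#include <stdio.h>

int
main (void)
{
  int from = 3, to = 14, step = 4;

  /* Naive DO loop.  */
  int naive_iters = 0, last_naive = from;
  int dovar;
  for (dovar = from; dovar <= to; dovar += step)
    {
      last_naive = dovar;
      naive_iters++;
    }

  /* Count-based form produced for OMP_FOR.  */
  int trip = (to + step - from) / step;	/* TRUNC_DIV_EXPR.  */
  int rewritten_iters = 0, last_rewritten = from;
  int count;
  for (count = 0; count < trip; count++)
    {
      int v = from + count * step;	/* dovar = from + count * step.  */
      last_rewritten = v;
      rewritten_iters++;
    }

  /* Both forms report 3 iterations with final dovar 11.  */
  printf ("naive:     %d iterations, last dovar = %d\n",
	  naive_iters, last_naive);
  printf ("rewritten: %d iterations, last dovar = %d\n",
	  rewritten_iters, last_rewritten);
  return 0;
}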