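/*
 * Start recording a new command stream (CS): emit the start-of-CS command
 * buffer and mark every cached state atom dirty so that all state is
 * re-emitted into the new CS. (Typically called right after the previous
 * CS has been flushed.)
 */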
void r600_begin_new_cs(struct r600_context *ctx)
{
	unsigned shader;

	if (ctx->is_debug) {
		uint32_t zero = 0;

		/* Create a buffer used for writing trace IDs and initialize it to 0. */
		assert(!ctx->trace_buf);
		ctx->trace_buf = (struct r600_resource*)
			pipe_buffer_create(ctx->b.b.screen, 0,
					   PIPE_USAGE_STAGING, 4);
		if (ctx->trace_buf)
			pipe_buffer_write_nooverlap(&ctx->b.b, &ctx->trace_buf->b.b,
						    0, sizeof(zero), &zero);
		ctx->trace_id = 0;
	}

	if (ctx->trace_buf)
		eg_trace_emit(ctx);

	ctx->b.flags = 0;
	ctx->b.gtt = 0;
	ctx->b.vram = 0;

	/* Begin a new CS. */
	r600_emit_command_buffer(ctx->b.gfx.cs, &ctx->start_cs_cmd);

	/* Re-emit states. */
	r600_mark_atom_dirty(ctx, &ctx->alphatest_state.atom);
	r600_mark_atom_dirty(ctx, &ctx->blend_color.atom);
	r600_mark_atom_dirty(ctx, &ctx->cb_misc_state.atom);
	r600_mark_atom_dirty(ctx, &ctx->clip_misc_state.atom);
	r600_mark_atom_dirty(ctx, &ctx->clip_state.atom);
	r600_mark_atom_dirty(ctx, &ctx->db_misc_state.atom);
	r600_mark_atom_dirty(ctx, &ctx->db_state.atom);
	r600_mark_atom_dirty(ctx, &ctx->framebuffer.atom);
	if (ctx->b.chip_class >= EVERGREEN) {
		r600_mark_atom_dirty(ctx, &ctx->fragment_images.atom);
		r600_mark_atom_dirty(ctx, &ctx->fragment_buffers.atom);
		r600_mark_atom_dirty(ctx, &ctx->compute_images.atom);
		r600_mark_atom_dirty(ctx, &ctx->compute_buffers.atom);
	}
	r600_mark_atom_dirty(ctx, &ctx->hw_shader_stages[R600_HW_STAGE_PS].atom);
	r600_mark_atom_dirty(ctx, &ctx->poly_offset_state.atom);
	r600_mark_atom_dirty(ctx, &ctx->vgt_state.atom);
	r600_mark_atom_dirty(ctx, &ctx->sample_mask.atom);
	ctx->b.scissors.dirty_mask = (1 << R600_MAX_VIEWPORTS) - 1;
	r600_mark_atom_dirty(ctx, &ctx->b.scissors.atom);
	ctx->b.viewports.dirty_mask = (1 << R600_MAX_VIEWPORTS) - 1;
	ctx->b.viewports.depth_range_dirty_mask = (1 << R600_MAX_VIEWPORTS) - 1;
	r600_mark_atom_dirty(ctx, &ctx->b.viewports.atom);
	if (ctx->b.chip_class <= EVERGREEN) {
		r600_mark_atom_dirty(ctx, &ctx->config_state.atom);
	}
	r600_mark_atom_dirty(ctx, &ctx->stencil_ref.atom);
	r600_mark_atom_dirty(ctx, &ctx->vertex_fetch_shader.atom);
	r600_mark_atom_dirty(ctx, &ctx->hw_shader_stages[R600_HW_STAGE_ES].atom);
	r600_mark_atom_dirty(ctx, &ctx->shader_stages.atom);
	if (ctx->gs_shader) {
		r600_mark_atom_dirty(ctx, &ctx->hw_shader_stages[R600_HW_STAGE_GS].atom);
		r600_mark_atom_dirty(ctx, &ctx->gs_rings.atom);
	}
	if (ctx->tes_shader) {
		r600_mark_atom_dirty(ctx, &ctx->hw_shader_stages[EG_HW_STAGE_HS].atom);
		r600_mark_atom_dirty(ctx, &ctx->hw_shader_stages[EG_HW_STAGE_LS].atom);
	}
	r600_mark_atom_dirty(ctx, &ctx->hw_shader_stages[R600_HW_STAGE_VS].atom);
	r600_mark_atom_dirty(ctx, &ctx->b.streamout.enable_atom);
	r600_mark_atom_dirty(ctx, &ctx->b.render_cond_atom);
	if (ctx->blend_state.cso)
		r600_mark_atom_dirty(ctx, &ctx->blend_state.atom);
	if (ctx->dsa_state.cso)
		r600_mark_atom_dirty(ctx, &ctx->dsa_state.atom);
	if (ctx->rasterizer_state.cso)
		r600_mark_atom_dirty(ctx, &ctx->rasterizer_state.atom);

	if (ctx->b.chip_class <= R700) {
		r600_mark_atom_dirty(ctx, &ctx->seamless_cube_map.atom);
	}

	ctx->vertex_buffer_state.dirty_mask = ctx->vertex_buffer_state.enabled_mask;
	r600_vertex_buffers_dirty(ctx);

	/* Re-emit shader resources. */
	for (shader = 0; shader < PIPE_SHADER_TYPES; shader++) {
		struct r600_constbuf_state *constbuf = &ctx->constbuf_state[shader];
		struct r600_textures_info *samplers = &ctx->samplers[shader];

		constbuf->dirty_mask = constbuf->enabled_mask;
		samplers->views.dirty_mask = samplers->views.enabled_mask;
		samplers->states.dirty_mask = samplers->states.enabled_mask;

		r600_constant_buffers_dirty(ctx, constbuf);
		r600_sampler_views_dirty(ctx, &samplers->views);
		r600_sampler_states_dirty(ctx, &samplers->states);
	}

	for (shader = 0; shader < ARRAY_SIZE(ctx->scratch_buffers); shader++) {
		ctx->scratch_buffers[shader].dirty = true;
	}

	r600_postflush_resume_features(&ctx->b);

	/* Re-emit the draw state. */
	ctx->last_primitive_type = -1;
	ctx->last_start_instance = -1;
	ctx->last_rast_prim = -1;
	ctx->current_rast_prim = -1;

	assert(!ctx->b.gfx.cs->prev_dw);
	ctx->b.initial_gfx_cs_size = ctx->b.gfx.cs->current.cdw;
}