Example No. 1
void cayman_emit_msaa_config(struct radeon_winsys_cs *cs, int nr_samples,
			     int ps_iter_samples, int overrast_samples)
{
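	/* Effective sample count: nr_samples for real MSAA, otherwise
	 * overrast_samples for overrasterization-only rendering, else 0. */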
	int setup_samples = nr_samples > 1 ? nr_samples :
			    overrast_samples > 1 ? overrast_samples : 0;

	if (setup_samples > 1) {
		/* indexed by log2(nr_samples) */
		unsigned max_dist[] = {
			0,
			eg_max_dist_2x,
			eg_max_dist_4x,
			cm_max_dist_8x,
			cm_max_dist_16x
		};
		unsigned log_samples = util_logbase2(setup_samples);
		unsigned log_ps_iter_samples =
			util_logbase2(util_next_power_of_two(ps_iter_samples));

		radeon_set_context_reg_seq(cs, CM_R_028BDC_PA_SC_LINE_CNTL, 2);
		radeon_emit(cs, S_028BDC_LAST_PIXEL(1) |
			    S_028BDC_EXPAND_LINE_WIDTH(1)); /* CM_R_028BDC_PA_SC_LINE_CNTL */
		radeon_emit(cs, S_028BE0_MSAA_NUM_SAMPLES(log_samples) |
			    S_028BE0_MAX_SAMPLE_DIST(max_dist[log_samples]) |
			    S_028BE0_MSAA_EXPOSED_SAMPLES(log_samples)); /* CM_R_028BE0_PA_SC_AA_CONFIG */

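		/* True MSAA and overrasterization-only rendering program
		 * DB_EQAA and PA_SC_MODE_CNTL_1 differently. */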
		if (nr_samples > 1) {
			radeon_set_context_reg(cs, CM_R_028804_DB_EQAA,
					       S_028804_MAX_ANCHOR_SAMPLES(log_samples) |
					       S_028804_PS_ITER_SAMPLES(log_ps_iter_samples) |
					       S_028804_MASK_EXPORT_NUM_SAMPLES(log_samples) |
					       S_028804_ALPHA_TO_MASK_NUM_SAMPLES(log_samples) |
					       S_028804_HIGH_QUALITY_INTERSECTIONS(1) |
					       S_028804_STATIC_ANCHOR_ASSOCIATIONS(1));
			radeon_set_context_reg(cs, EG_R_028A4C_PA_SC_MODE_CNTL_1,
					       EG_S_028A4C_PS_ITER_SAMPLE(ps_iter_samples > 1));
		} else if (overrast_samples > 1) {
			radeon_set_context_reg(cs, CM_R_028804_DB_EQAA,
					       S_028804_HIGH_QUALITY_INTERSECTIONS(1) |
					       S_028804_STATIC_ANCHOR_ASSOCIATIONS(1) |
					       S_028804_OVERRASTERIZATION_AMOUNT(log_samples));
			radeon_set_context_reg(cs, EG_R_028A4C_PA_SC_MODE_CNTL_1, 0);
		}
	} else {
		radeon_set_context_reg_seq(cs, CM_R_028BDC_PA_SC_LINE_CNTL, 2);
		radeon_emit(cs, S_028BDC_LAST_PIXEL(1)); /* CM_R_028BDC_PA_SC_LINE_CNTL */
		radeon_emit(cs, 0); /* CM_R_028BE0_PA_SC_AA_CONFIG */

		radeon_set_context_reg(cs, CM_R_028804_DB_EQAA,
				       S_028804_HIGH_QUALITY_INTERSECTIONS(1) |
				       S_028804_STATIC_ANCHOR_ASSOCIATIONS(1));
		radeon_set_context_reg(cs, EG_R_028A4C_PA_SC_MODE_CNTL_1, 0);
	}
}
Example No. 2
void cayman_emit_msaa_sample_locs(struct radeon_winsys_cs *cs, int nr_samples)
{
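	/* Program the MSAA sample locations for each pixel of the 2x2 quad
	 * (X0Y0, X1Y0, X0Y1, X1Y1). */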
	switch (nr_samples) {
	case 2:
		radeon_set_context_reg(cs, CM_R_028BF8_PA_SC_AA_SAMPLE_LOCS_PIXEL_X0Y0_0, eg_sample_locs_2x[0]);
		radeon_set_context_reg(cs, CM_R_028C08_PA_SC_AA_SAMPLE_LOCS_PIXEL_X1Y0_0, eg_sample_locs_2x[1]);
		radeon_set_context_reg(cs, CM_R_028C18_PA_SC_AA_SAMPLE_LOCS_PIXEL_X0Y1_0, eg_sample_locs_2x[2]);
		radeon_set_context_reg(cs, CM_R_028C28_PA_SC_AA_SAMPLE_LOCS_PIXEL_X1Y1_0, eg_sample_locs_2x[3]);
		break;
	case 4:
		radeon_set_context_reg(cs, CM_R_028BF8_PA_SC_AA_SAMPLE_LOCS_PIXEL_X0Y0_0, eg_sample_locs_4x[0]);
		radeon_set_context_reg(cs, CM_R_028C08_PA_SC_AA_SAMPLE_LOCS_PIXEL_X1Y0_0, eg_sample_locs_4x[1]);
		radeon_set_context_reg(cs, CM_R_028C18_PA_SC_AA_SAMPLE_LOCS_PIXEL_X0Y1_0, eg_sample_locs_4x[2]);
		radeon_set_context_reg(cs, CM_R_028C28_PA_SC_AA_SAMPLE_LOCS_PIXEL_X1Y1_0, eg_sample_locs_4x[3]);
		break;
	case 8:
		radeon_set_context_reg_seq(cs, CM_R_028BF8_PA_SC_AA_SAMPLE_LOCS_PIXEL_X0Y0_0, 14);
		radeon_emit(cs, cm_sample_locs_8x[0]);
		radeon_emit(cs, cm_sample_locs_8x[4]);
		radeon_emit(cs, 0);
		radeon_emit(cs, 0);
		radeon_emit(cs, cm_sample_locs_8x[1]);
		radeon_emit(cs, cm_sample_locs_8x[5]);
		radeon_emit(cs, 0);
		radeon_emit(cs, 0);
		radeon_emit(cs, cm_sample_locs_8x[2]);
		radeon_emit(cs, cm_sample_locs_8x[6]);
		radeon_emit(cs, 0);
		radeon_emit(cs, 0);
		radeon_emit(cs, cm_sample_locs_8x[3]);
		radeon_emit(cs, cm_sample_locs_8x[7]);
		break;
	case 16:
		radeon_set_context_reg_seq(cs, CM_R_028BF8_PA_SC_AA_SAMPLE_LOCS_PIXEL_X0Y0_0, 16);
		radeon_emit(cs, cm_sample_locs_16x[0]);
		radeon_emit(cs, cm_sample_locs_16x[4]);
		radeon_emit(cs, cm_sample_locs_16x[8]);
		radeon_emit(cs, cm_sample_locs_16x[12]);
		radeon_emit(cs, cm_sample_locs_16x[1]);
		radeon_emit(cs, cm_sample_locs_16x[5]);
		radeon_emit(cs, cm_sample_locs_16x[9]);
		radeon_emit(cs, cm_sample_locs_16x[13]);
		radeon_emit(cs, cm_sample_locs_16x[2]);
		radeon_emit(cs, cm_sample_locs_16x[6]);
		radeon_emit(cs, cm_sample_locs_16x[10]);
		radeon_emit(cs, cm_sample_locs_16x[14]);
		radeon_emit(cs, cm_sample_locs_16x[3]);
		radeon_emit(cs, cm_sample_locs_16x[7]);
		radeon_emit(cs, cm_sample_locs_16x[11]);
		radeon_emit(cs, cm_sample_locs_16x[15]);
		break;
	}
}
Example No. 3
void
si_write_viewport(struct radeon_winsys_cs *cs, int first_vp,
                  int count, const VkViewport *viewports)
{
	int i;

	if (count == 0) {
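		/* No viewports: fall back to an identity transform and a
		 * full [0, 1] depth range. */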
		radeon_set_context_reg_seq(cs, R_02843C_PA_CL_VPORT_XSCALE, 6);
		radeon_emit(cs, fui(1.0));
		radeon_emit(cs, fui(0.0));
		radeon_emit(cs, fui(1.0));
		radeon_emit(cs, fui(0.0));
		radeon_emit(cs, fui(1.0));
		radeon_emit(cs, fui(0.0));

		radeon_set_context_reg_seq(cs, R_0282D0_PA_SC_VPORT_ZMIN_0, 2);
		radeon_emit(cs, fui(0.0));
		radeon_emit(cs, fui(1.0));

		return;
	}
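	/* Each viewport takes six consecutive dwords starting at
	 * PA_CL_VPORT_XSCALE: X/Y/Z scale and offset pairs. */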
	radeon_set_context_reg_seq(cs, R_02843C_PA_CL_VPORT_XSCALE +
				   first_vp * 4 * 6, count * 6);

	for (i = 0; i < count; i++) {
		float scale[3], translate[3];


		get_viewport_xform(&viewports[i], scale, translate);
		radeon_emit(cs, fui(scale[0]));
		radeon_emit(cs, fui(translate[0]));
		radeon_emit(cs, fui(scale[1]));
		radeon_emit(cs, fui(translate[1]));
		radeon_emit(cs, fui(scale[2]));
		radeon_emit(cs, fui(translate[2]));
	}

	/* All ZMIN/ZMAX pairs share one packet header, so emit it once
	 * before the loop. */
	radeon_set_context_reg_seq(cs, R_0282D0_PA_SC_VPORT_ZMIN_0 +
				   first_vp * 4 * 2, count * 2);
	for (i = 0; i < count; i++) {
		float zmin = MIN2(viewports[i].minDepth, viewports[i].maxDepth);
		float zmax = MAX2(viewports[i].minDepth, viewports[i].maxDepth);
		radeon_emit(cs, fui(zmin));
		radeon_emit(cs, fui(zmax));
	}
}
Example No. 4
void
si_write_scissors(struct radeon_winsys_cs *cs, int first,
                  int count, const VkRect2D *scissors)
{
	int i;
	if (count == 0)
		return;

	radeon_set_context_reg_seq(cs, R_028250_PA_SC_VPORT_SCISSOR_0_TL + first * 4 * 2, count * 2);
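	/* Each scissor is emitted as a TL/BR register pair, with BR
	 * computed as offset + extent. */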
	for (i = 0; i < count; i++) {
		radeon_emit(cs, S_028250_TL_X(scissors[i].offset.x) |
			    S_028250_TL_Y(scissors[i].offset.y) |
			    S_028250_WINDOW_OFFSET_DISABLE(1));
		radeon_emit(cs, S_028254_BR_X(scissors[i].offset.x + scissors[i].extent.width) |
			    S_028254_BR_Y(scissors[i].offset.y + scissors[i].extent.height));
	}
}
Example No. 5
static void r600_emit_streamout_begin(struct r600_common_context *rctx, struct r600_atom *atom)
{
	struct radeon_winsys_cs *cs = rctx->gfx.cs;
	struct r600_so_target **t = rctx->streamout.targets;
	uint16_t *stride_in_dw = rctx->streamout.stride_in_dw;
	unsigned i, update_flags = 0;

	r600_flush_vgt_streamout(rctx);

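	/* Set up every bound streamout target: record its stride and, on
	 * SI and later, program the buffer size and vertex stride. */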
	for (i = 0; i < rctx->streamout.num_targets; i++) {
		if (!t[i])
			continue;

		t[i]->stride_in_dw = stride_in_dw[i];

		if (rctx->chip_class >= SI) {
			/* SI binds streamout buffers as shader resources.
			 * VGT only counts primitives and tells the shader
			 * through SGPRs what to do. */
			radeon_set_context_reg_seq(cs, R_028AD0_VGT_STRMOUT_BUFFER_SIZE_0 + 16*i, 2);
			radeon_emit(cs, (t[i]->b.buffer_offset +
					 t[i]->b.buffer_size) >> 2);	/* BUFFER_SIZE (in DW) */
			radeon_emit(cs, stride_in_dw[i]);		/* VTX_STRIDE (in DW) */
		} else {