static bool analogix_dp_detect_sink_psr(struct analogix_dp_device *dp)
{
	unsigned char psr_version;

	drm_dp_dpcd_readb(&dp->aux, DP_PSR_SUPPORT, &psr_version);
	dev_dbg(dp->dev, "Panel PSR version : %x\n", psr_version);

	return (psr_version & DP_PSR_IS_SUPPORTED) ? true : false;
}
static void analogix_dp_enable_scramble(struct analogix_dp_device *dp,
					bool enable)
{
	u8 data;

	if (enable) {
		analogix_dp_enable_scrambling(dp);

		drm_dp_dpcd_readb(&dp->aux, DP_TRAINING_PATTERN_SET, &data);
		drm_dp_dpcd_writeb(&dp->aux, DP_TRAINING_PATTERN_SET,
				   (u8)(data & ~DP_LINK_SCRAMBLING_DISABLE));
	} else {
		analogix_dp_disable_scrambling(dp);

		drm_dp_dpcd_readb(&dp->aux, DP_TRAINING_PATTERN_SET, &data);
		drm_dp_dpcd_writeb(&dp->aux, DP_TRAINING_PATTERN_SET,
				   (u8)(data | DP_LINK_SCRAMBLING_DISABLE));
	}
}
static int analogix_dp_is_enhanced_mode_available(struct analogix_dp_device *dp)
{
	u8 data;
	int retval;

	drm_dp_dpcd_readb(&dp->aux, DP_MAX_LANE_COUNT, &data);
	retval = DPCD_ENHANCED_FRAME_CAP(data);

	return retval;
}
int radeon_dp_get_panel_mode(struct drm_encoder *encoder,
			     struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
	struct radeon_connector_atom_dig *dig_connector;
	int panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
	u16 dp_bridge = radeon_connector_encoder_get_dp_bridge_encoder_id(connector);
	u8 tmp;

	if (!ASIC_IS_DCE4(rdev))
		return panel_mode;

	if (!radeon_connector->con_priv)
		return panel_mode;

	dig_connector = radeon_connector->con_priv;

	if (dp_bridge != ENCODER_OBJECT_ID_NONE) {
		/* DP bridge chips */
		if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux,
				      DP_EDP_CONFIGURATION_CAP, &tmp) == 1) {
			if (tmp & 1)
				panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
			else if ((dp_bridge == ENCODER_OBJECT_ID_NUTMEG) ||
				 (dp_bridge == ENCODER_OBJECT_ID_TRAVIS))
				panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;
			else
				panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
		}
	} else if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
		/* eDP */
		if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux,
				      DP_EDP_CONFIGURATION_CAP, &tmp) == 1) {
			if (tmp & 1)
				panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
		}
	}

	return panel_mode;
}
static bool drm_dp_cec_cap(struct drm_dp_aux *aux, u8 *cec_cap)
{
	u8 cap = 0;

	if (drm_dp_dpcd_readb(aux, DP_CEC_TUNNELING_CAPABILITY, &cap) != 1 ||
	    !(cap & DP_CEC_TUNNELING_CAPABLE))
		return false;
	if (cec_cap)
		*cec_cap = cap;
	return true;
}
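/*
 * Note on the differing success checks across these snippets:
 * drm_dp_dpcd_readb() returns the number of bytes transferred (1) on success
 * or a negative errno, which is why callers variously test "!= 1", "== 1",
 * or "< 0". A minimal sketch of a caller using that convention; the helper
 * name is hypothetical:
 */
static bool example_dpcd_rev_readable(struct drm_dp_aux *aux)
{
	u8 rev;

	/* exactly one byte transferred on success, negative errno on failure */
	return drm_dp_dpcd_readb(aux, DP_DPCD_REV, &rev) == 1;
}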
static void analogix_dp_get_max_rx_lane_count(struct analogix_dp_device *dp,
					      u8 *lane_count)
{
	u8 data;

	/*
	 * For DP rev.1.1, Maximum number of Main Link lanes
	 * 0x01 = 1 lane, 0x02 = 2 lanes, 0x04 = 4 lanes
	 */
	drm_dp_dpcd_readb(&dp->aux, DP_MAX_LANE_COUNT, &data);
	*lane_count = DPCD_MAX_LANE_COUNT(data);
}
static void analogix_dp_get_max_rx_bandwidth(struct analogix_dp_device *dp,
					     u8 *bandwidth)
{
	u8 data;

	/*
	 * For DP rev.1.1, Maximum link rate of Main Link lanes
	 * 0x06 = 1.62 Gbps, 0x0a = 2.7 Gbps
	 * For DP rev.1.2, Maximum link rate of Main Link lanes
	 * 0x06 = 1.62 Gbps, 0x0a = 2.7 Gbps, 0x14 = 5.4 Gbps
	 */
	drm_dp_dpcd_readb(&dp->aux, DP_MAX_LINK_RATE, &data);
	*bandwidth = data;
}
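/*
 * The DP_MAX_LINK_RATE code returned above is the per-lane link rate in units
 * of 0.27 Gbps (0x06 -> 1.62, 0x0a -> 2.7, 0x14 -> 5.4). A minimal sketch of
 * that conversion; the helper name is illustrative, and the DRM core already
 * provides drm_dp_bw_code_to_link_rate() for the equivalent kHz conversion:
 */
static unsigned int example_bw_code_to_mbps(u8 bw_code)
{
	/* each code step corresponds to 270 Mbps per lane */
	return bw_code * 270U;
}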
static bool lspcon_wake_native_aux_ch(struct intel_lspcon *lspcon)
{
	uint8_t rev;

	if (drm_dp_dpcd_readb(&lspcon_to_intel_dp(lspcon)->aux, DP_DPCD_REV,
			      &rev) != 1) {
		DRM_DEBUG_KMS("Native AUX CH down\n");
		return false;
	}

	DRM_DEBUG_KMS("Native AUX CH up, DPCD version: %d.%d\n",
		      rev >> 4, rev & 0xf);

	return true;
}
static void analogix_dp_enable_rx_to_enhanced_mode(struct analogix_dp_device *dp,
						   bool enable)
{
	u8 data;

	drm_dp_dpcd_readb(&dp->aux, DP_LANE_COUNT_SET, &data);

	if (enable)
		drm_dp_dpcd_writeb(&dp->aux, DP_LANE_COUNT_SET,
				   DP_LANE_COUNT_ENHANCED_FRAME_EN |
				   DPCD_LANE_COUNT_SET(data));
	else
		drm_dp_dpcd_writeb(&dp->aux, DP_LANE_COUNT_SET,
				   DPCD_LANE_COUNT_SET(data));
}
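/*
 * The two enhanced-framing helpers above pair naturally: query the sink's
 * capability bit from DP_MAX_LANE_COUNT, then mirror it into
 * DP_LANE_COUNT_SET. A hedged sketch of such a wrapper; the
 * analogix_dp_set_enhanced_mode() invoked during equalizer training below is
 * presumably of roughly this shape (it may also program the transmitter
 * side):
 */
static void example_set_enhanced_mode(struct analogix_dp_device *dp)
{
	int enhanced = analogix_dp_is_enhanced_mode_available(dp);

	analogix_dp_enable_rx_to_enhanced_mode(dp, enhanced != 0);
}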
static void analogix_dp_enable_sink_psr(struct analogix_dp_device *dp)
{
	unsigned char psr_en;

	/* Disable psr function */
	drm_dp_dpcd_readb(&dp->aux, DP_PSR_EN_CFG, &psr_en);
	psr_en &= ~DP_PSR_ENABLE;
	drm_dp_dpcd_writeb(&dp->aux, DP_PSR_EN_CFG, psr_en);

	/* Main-Link transmitter remains active during PSR active states */
	psr_en = DP_PSR_MAIN_LINK_ACTIVE | DP_PSR_CRC_VERIFICATION;
	drm_dp_dpcd_writeb(&dp->aux, DP_PSR_EN_CFG, psr_en);

	/* Enable psr function */
	psr_en = DP_PSR_ENABLE | DP_PSR_MAIN_LINK_ACTIVE |
		 DP_PSR_CRC_VERIFICATION;
	drm_dp_dpcd_writeb(&dp->aux, DP_PSR_EN_CFG, psr_en);

	analogix_dp_enable_psr_crc(dp);
}
static int drm_dp_cec_adap_monitor_all_enable(struct cec_adapter *adap,
					      bool enable)
{
	struct drm_dp_aux *aux = cec_get_drvdata(adap);
	ssize_t err;
	u8 val;

	if (!(adap->capabilities & CEC_CAP_MONITOR_ALL))
		return 0;

	err = drm_dp_dpcd_readb(aux, DP_CEC_TUNNELING_CONTROL, &val);
	if (err >= 0) {
		if (enable)
			val |= DP_CEC_SNOOPING_ENABLE;
		else
			val &= ~DP_CEC_SNOOPING_ENABLE;
		err = drm_dp_dpcd_writeb(aux, DP_CEC_TUNNELING_CONTROL, val);
	}
	return (enable && err < 0) ? err : 0;
}
static void set_aux_backlight_enable(struct intel_dp *intel_dp, bool enable)
{
	uint8_t reg_val = 0;

	if (drm_dp_dpcd_readb(&intel_dp->aux, DP_EDP_DISPLAY_CONTROL_REGISTER,
			      &reg_val) < 0) {
		DRM_DEBUG_KMS("Failed to read DPCD register 0x%x\n",
			      DP_EDP_DISPLAY_CONTROL_REGISTER);
		return;
	}
	if (enable)
		reg_val |= DP_EDP_BACKLIGHT_ENABLE;
	else
		reg_val &= ~(DP_EDP_BACKLIGHT_ENABLE);

	if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_EDP_DISPLAY_CONTROL_REGISTER,
			       reg_val) != 1) {
		DRM_DEBUG_KMS("Failed to %s aux backlight\n",
			      enable ? "enable" : "disable");
	}
}
static int drm_dp_cec_received(struct drm_dp_aux *aux)
{
	struct cec_adapter *adap = aux->cec.adap;
	struct cec_msg msg;
	u8 rx_msg_info;
	ssize_t err;

	err = drm_dp_dpcd_readb(aux, DP_CEC_RX_MESSAGE_INFO, &rx_msg_info);
	if (err < 0)
		return err;

	if (!(rx_msg_info & DP_CEC_RX_MESSAGE_ENDED))
		return 0;

	msg.len = (rx_msg_info & DP_CEC_RX_MESSAGE_LEN_MASK) + 1;
	err = drm_dp_dpcd_read(aux, DP_CEC_RX_MESSAGE_BUFFER, msg.msg, msg.len);
	if (err < 0)
		return err;

	cec_received_msg(adap, &msg);
	return 0;
}
static void drm_dp_cec_handle_irq(struct drm_dp_aux *aux)
{
	struct cec_adapter *adap = aux->cec.adap;
	u8 flags;

	if (drm_dp_dpcd_readb(aux, DP_CEC_TUNNELING_IRQ_FLAGS, &flags) < 0)
		return;

	if (flags & DP_CEC_RX_MESSAGE_INFO_VALID)
		drm_dp_cec_received(aux);

	if (flags & DP_CEC_TX_MESSAGE_SENT)
		cec_transmit_attempt_done(adap, CEC_TX_STATUS_OK);
	else if (flags & DP_CEC_TX_LINE_ERROR)
		cec_transmit_attempt_done(adap, CEC_TX_STATUS_ERROR |
						CEC_TX_STATUS_MAX_RETRIES);
	else if (flags &
		 (DP_CEC_TX_ADDRESS_NACK_ERROR | DP_CEC_TX_DATA_NACK_ERROR))
		cec_transmit_attempt_done(adap, CEC_TX_STATUS_NACK |
						CEC_TX_STATUS_MAX_RETRIES);

	drm_dp_dpcd_writeb(aux, DP_CEC_TUNNELING_IRQ_FLAGS, flags);
}
/**
 * drm_dp_cec_irq() - handle CEC interrupt, if any
 * @aux: DisplayPort AUX channel
 *
 * Should be called when handling an IRQ_HPD request. If CEC-tunneling-over-AUX
 * is present, then it will check for a CEC_IRQ and handle it accordingly.
 */
void drm_dp_cec_irq(struct drm_dp_aux *aux)
{
	u8 cec_irq;
	int ret;

	/* No transfer function was set, so not a DP connector */
	if (!aux->transfer)
		return;

	mutex_lock(&aux->cec.lock);
	if (!aux->cec.adap)
		goto unlock;

	ret = drm_dp_dpcd_readb(aux, DP_DEVICE_SERVICE_IRQ_VECTOR_ESI1,
				&cec_irq);
	if (ret < 0 || !(cec_irq & DP_CEC_IRQ))
		goto unlock;

	drm_dp_cec_handle_irq(aux);
	drm_dp_dpcd_writeb(aux, DP_DEVICE_SERVICE_IRQ_VECTOR_ESI1, DP_CEC_IRQ);
unlock:
	mutex_unlock(&aux->cec.lock);
}
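/*
 * Per its kernel-doc, drm_dp_cec_irq() is meant to be invoked from a driver's
 * IRQ_HPD (short-pulse) handling once the DPCD IRQ vector may have been
 * raised. A minimal hedged sketch of such a call site; the handler name is
 * hypothetical:
 */
static void example_handle_hpd_short_pulse(struct drm_dp_aux *aux)
{
	/* checks DP_DEVICE_SERVICE_IRQ_VECTOR_ESI1 and services any CEC IRQ */
	drm_dp_cec_irq(aux);
}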
void radeon_dp_link_train(struct drm_encoder *encoder,
			  struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
	struct radeon_encoder_atom_dig *dig;
	struct radeon_connector *radeon_connector;
	struct radeon_connector_atom_dig *dig_connector;
	struct radeon_dp_link_train_info dp_info;
	int index;
	u8 tmp, frev, crev;

	if (!radeon_encoder->enc_priv)
		return;
	dig = radeon_encoder->enc_priv;

	radeon_connector = to_radeon_connector(connector);
	if (!radeon_connector->con_priv)
		return;
	dig_connector = radeon_connector->con_priv;

	if ((dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_DISPLAYPORT) &&
	    (dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_eDP))
		return;

	/* DPEncoderService newer than 1.1 can't program the training
	 * pattern properly. When facing such a version, use
	 * DIGXEncoderControl (X == 1 | 2) instead.
	 */
	dp_info.use_dpencoder = true;
	index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
	if (atom_parse_cmd_header(rdev->mode_info.atom_context, index,
				  &frev, &crev)) {
		if (crev > 1)
			dp_info.use_dpencoder = false;
	}

	dp_info.enc_id = 0;
	if (dig->dig_encoder)
		dp_info.enc_id |= ATOM_DP_CONFIG_DIG2_ENCODER;
	else
		dp_info.enc_id |= ATOM_DP_CONFIG_DIG1_ENCODER;
	if (dig->linkb)
		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_B;
	else
		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_A;

	if (drm_dp_dpcd_readb(&radeon_connector->ddc_bus->aux,
			      DP_MAX_LANE_COUNT, &tmp) == 1) {
		if (ASIC_IS_DCE5(rdev) && (tmp & DP_TPS3_SUPPORTED))
			dp_info.tp3_supported = true;
		else
			dp_info.tp3_supported = false;
	} else {
		dp_info.tp3_supported = false;
	}

	memcpy(dp_info.dpcd, dig_connector->dpcd, DP_RECEIVER_CAP_SIZE);
	dp_info.rdev = rdev;
	dp_info.encoder = encoder;
	dp_info.connector = connector;
	dp_info.dp_lane_count = dig_connector->dp_lane_count;
	dp_info.dp_clock = dig_connector->dp_clock;
	dp_info.aux = &radeon_connector->ddc_bus->aux;

	if (radeon_dp_link_train_init(&dp_info))
		goto done;
	if (radeon_dp_link_train_cr(&dp_info))
		goto done;
	if (radeon_dp_link_train_ce(&dp_info))
		goto done;
done:
	if (radeon_dp_link_train_finish(&dp_info))
		return;
}
static int analogix_dp_process_equalizer_training(struct analogix_dp_device *dp)
{
	int lane, lane_count, retval;
	u32 reg;
	u8 link_align, link_status[2], adjust_request[2];

	usleep_range(400, 401);

	lane_count = dp->link_train.lane_count;

	retval = drm_dp_dpcd_read(&dp->aux, DP_LANE0_1_STATUS, link_status, 2);
	if (retval < 0)
		return retval;

	if (analogix_dp_clock_recovery_ok(link_status, lane_count)) {
		analogix_dp_reduce_link_rate(dp);
		return -EIO;
	}

	retval = drm_dp_dpcd_read(&dp->aux, DP_ADJUST_REQUEST_LANE0_1,
				  adjust_request, 2);
	if (retval < 0)
		return retval;

	retval = drm_dp_dpcd_readb(&dp->aux, DP_LANE_ALIGN_STATUS_UPDATED,
				   &link_align);
	if (retval < 0)
		return retval;

	analogix_dp_get_adjust_training_lane(dp, adjust_request);

	if (!analogix_dp_channel_eq_ok(link_status, link_align, lane_count)) {
		/* set training pattern to normal */
		analogix_dp_training_pattern_dis(dp);

		dev_info(dp->dev, "Link Training success!\n");

		analogix_dp_get_link_bandwidth(dp, &reg);
		dp->link_train.link_rate = reg;
		dev_dbg(dp->dev, "final bandwidth = %.2x\n",
			dp->link_train.link_rate);

		analogix_dp_get_lane_count(dp, &reg);
		dp->link_train.lane_count = reg;
		dev_dbg(dp->dev, "final lane count = %.2x\n",
			dp->link_train.lane_count);

		/* set enhanced mode if available */
		analogix_dp_set_enhanced_mode(dp);
		dp->link_train.lt_state = FINISHED;

		return 0;
	}

	/* not all locked */
	dp->link_train.eq_loop++;

	if (dp->link_train.eq_loop > MAX_EQ_LOOP) {
		dev_err(dp->dev, "EQ Max loop\n");
		analogix_dp_reduce_link_rate(dp);
		return -EIO;
	}

	for (lane = 0; lane < lane_count; lane++)
		analogix_dp_set_lane_link_training(dp,
			dp->link_train.training_lane[lane], lane);

	retval = drm_dp_dpcd_write(&dp->aux, DP_TRAINING_LANE0_SET,
				   dp->link_train.training_lane, lane_count);
	if (retval < 0)
		return retval;

	return 0;
}