/*
 * Return the clock-source modifier flags (source polarity and x2/x8
 * prescaling) currently programmed for this counter.
 */
static unsigned ni_tio_clock_src_modifiers(const struct ni_gpct *counter)
{
	struct ni_gpct_device *counter_dev = counter->counter_dev;
	const unsigned counting_mode_bits =
		ni_tio_get_soft_copy(counter,
			NITIO_Gi_Counting_Mode_Reg(counter->counter_index));
	unsigned bits = 0;

	if (ni_tio_get_soft_copy(counter,
			NITIO_Gi_Input_Select_Reg(counter->counter_index)) &
	    Gi_Source_Polarity_Bit)
		bits |= NI_GPCT_INVERT_CLOCK_SRC_BIT;
	if (counting_mode_bits & Gi_Prescale_X2_Bit(counter_dev->variant))
		bits |= NI_GPCT_PRESCALE_X2_CLOCK_SRC_BITS;
	if (counting_mode_bits & Gi_Prescale_X8_Bit(counter_dev->variant))
		bits |= NI_GPCT_PRESCALE_X8_CLOCK_SRC_BITS;
	return bits;
}
static void ni_tio_acknowledge_and_confirm(struct ni_gpct *counter,
					   int *gate_error,
					   int *tc_error,
					   int *perm_stale_data)
{
	unsigned int cidx = counter->counter_index;
	const unsigned short gxx_status = ni_tio_read(counter,
						NITIO_SHARED_STATUS_REG(cidx));
	const unsigned short gi_status = ni_tio_read(counter,
						NITIO_STATUS_REG(cidx));
	unsigned int ack = 0;

	if (gate_error)
		*gate_error = 0;
	if (tc_error)
		*tc_error = 0;
	if (perm_stale_data)
		*perm_stale_data = 0;

	if (gxx_status & GI_GATE_ERROR(cidx)) {
		ack |= GI_GATE_ERROR_CONFIRM(cidx);
		if (gate_error) {
			/*
			 * 660x don't support automatic acknowledgment
			 * of gate interrupt via dma read/write
			 * and report bogus gate errors
			 */
			if (counter->counter_dev->variant !=
			    ni_gpct_variant_660x)
				*gate_error = 1;
		}
	}
	if (gxx_status & GI_TC_ERROR(cidx)) {
		ack |= GI_TC_ERROR_CONFIRM(cidx);
		if (tc_error)
			*tc_error = 1;
	}
	if (gi_status & GI_TC)
		ack |= GI_TC_INTERRUPT_ACK;
	if (gi_status & GI_GATE_INTERRUPT) {
		if (should_ack_gate(counter))
			ack |= GI_GATE_INTERRUPT_ACK;
	}
	if (ack)
		ni_tio_write(counter, ack, NITIO_INT_ACK_REG(cidx));
	if (ni_tio_get_soft_copy(counter, NITIO_MODE_REG(cidx)) &
	    GI_LOADING_ON_GATE) {
		if (ni_tio_read(counter, NITIO_STATUS2_REG(cidx)) &
		    GI_PERMANENT_STALE(cidx)) {
			dev_info(counter->counter_dev->dev->class_dev,
				 "%s: Gi_Permanent_Stale_Data detected.\n",
				 __func__);
			if (perm_stale_data)
				*perm_stale_data = 1;
		}
	}
}
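/*
 * Translate the Gi_Source_Select field of the soft-copied input select
 * register into the generic NI_GPCT_*_CLOCK_SRC value for 660x boards,
 * including the RTSI and source-pin routes, then OR in the common
 * polarity/prescale modifier bits.
 */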
static unsigned ni_660x_clock_src_select(const struct ni_gpct *counter)
{
	unsigned clock_source = 0;
	unsigned i;
	const unsigned input_select =
		(ni_tio_get_soft_copy(counter,
			NITIO_Gi_Input_Select_Reg(counter->counter_index)) &
		 Gi_Source_Select_Mask) >> Gi_Source_Select_Shift;

	switch (input_select) {
	case NI_660x_Timebase_1_Clock:
		clock_source = NI_GPCT_TIMEBASE_1_CLOCK_SRC_BITS;
		break;
	case NI_660x_Timebase_2_Clock:
		clock_source = NI_GPCT_TIMEBASE_2_CLOCK_SRC_BITS;
		break;
	case NI_660x_Timebase_3_Clock:
		clock_source = NI_GPCT_TIMEBASE_3_CLOCK_SRC_BITS;
		break;
	case NI_660x_Logic_Low_Clock:
		clock_source = NI_GPCT_LOGIC_LOW_CLOCK_SRC_BITS;
		break;
	case NI_660x_Source_Pin_i_Clock:
		clock_source = NI_GPCT_SOURCE_PIN_i_CLOCK_SRC_BITS;
		break;
	case NI_660x_Next_Gate_Clock:
		clock_source = NI_GPCT_NEXT_GATE_CLOCK_SRC_BITS;
		break;
	case NI_660x_Next_TC_Clock:
		clock_source = NI_GPCT_NEXT_TC_CLOCK_SRC_BITS;
		break;
	default:
		for (i = 0; i <= ni_660x_max_rtsi_channel; ++i) {
			if (input_select == NI_660x_RTSI_Clock(i)) {
				clock_source = NI_GPCT_RTSI_CLOCK_SRC_BITS(i);
				break;
			}
		}
		if (i <= ni_660x_max_rtsi_channel)
			break;
		for (i = 0; i <= ni_660x_max_source_pin; ++i) {
			if (input_select == NI_660x_Source_Pin_Clock(i)) {
				clock_source =
				    NI_GPCT_SOURCE_PIN_CLOCK_SRC_BITS(i);
				break;
			}
		}
		if (i <= ni_660x_max_source_pin)
			break;
		BUG();
		break;
	}
	clock_source |= ni_tio_clock_src_modifiers(counter);
	return clock_source;
}
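/*
 * Choose between the normal and alternate synchronization modes for the
 * counter.  The quadrature and synchronous-source counting modes always
 * force the alternate sync bit; otherwise it is selected when the source
 * clock period is known to be shorter than the 25 ns minimum for normal
 * sync.
 */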
static void ni_tio_set_sync_mode(struct ni_gpct *counter, int force_alt_sync)
{
	struct ni_gpct_device *counter_dev = counter->counter_dev;
	const unsigned counting_mode_reg =
		NITIO_Gi_Counting_Mode_Reg(counter->counter_index);
	static const uint64_t min_normal_sync_period_ps = 25000;
	const uint64_t clock_period_ps = ni_tio_clock_period_ps(counter,
		ni_tio_generic_clock_src_select(counter));

	if (ni_tio_counting_mode_registers_present(counter_dev) == 0)
		return;

	switch (ni_tio_get_soft_copy(counter, counting_mode_reg) &
		Gi_Counting_Mode_Mask) {
	case Gi_Counting_Mode_QuadratureX1_Bits:
	case Gi_Counting_Mode_QuadratureX2_Bits:
	case Gi_Counting_Mode_QuadratureX4_Bits:
	case Gi_Counting_Mode_Sync_Source_Bits:
		force_alt_sync = 1;
		break;
	default:
		break;
	}
	/*
	 * It's not clear what we should do if clock_period is unknown, so we
	 * are not using the alt sync bit in that case, but allow the caller
	 * to decide by using the force_alt_sync parameter.
	 */
	if (force_alt_sync ||
	    (clock_period_ps && clock_period_ps < min_normal_sync_period_ps)) {
		ni_tio_set_bits(counter, counting_mode_reg,
				Gi_Alternate_Sync_Bit(counter_dev->variant),
				Gi_Alternate_Sync_Bit(counter_dev->variant));
	} else {
		ni_tio_set_bits(counter, counting_mode_reg,
				Gi_Alternate_Sync_Bit(counter_dev->variant),
				0x0);
	}
}
void ni_tio_acknowledge_and_confirm(struct ni_gpct *counter, int *gate_error,
				    int *tc_error, int *perm_stale_data,
				    int *stale_data)
{
	const unsigned short gxx_status = read_register(counter,
		NITIO_Gxx_Status_Reg(counter->counter_index));
	const unsigned short gi_status = read_register(counter,
		NITIO_Gi_Status_Reg(counter->counter_index));
	unsigned ack = 0;

	if (gate_error)
		*gate_error = 0;
	if (tc_error)
		*tc_error = 0;
	if (perm_stale_data)
		*perm_stale_data = 0;
	if (stale_data)
		*stale_data = 0;

	if (gxx_status & Gi_Gate_Error_Bit(counter->counter_index)) {
		ack |= Gi_Gate_Error_Confirm_Bit(counter->counter_index);
		if (gate_error) {
			/*
			 * 660x don't support automatic acknowledgement of
			 * gate interrupt via dma read/write and report bogus
			 * gate errors
			 */
			if (counter->counter_dev->variant !=
			    ni_gpct_variant_660x)
				*gate_error = 1;
		}
	}
	if (gxx_status & Gi_TC_Error_Bit(counter->counter_index)) {
		ack |= Gi_TC_Error_Confirm_Bit(counter->counter_index);
		if (tc_error)
			*tc_error = 1;
	}
	if (gi_status & Gi_TC_Bit)
		ack |= Gi_TC_Interrupt_Ack_Bit;
	if (gi_status & Gi_Gate_Interrupt_Bit) {
		if (should_ack_gate(counter))
			ack |= Gi_Gate_Interrupt_Ack_Bit;
	}
	if (ack)
		write_register(counter, ack,
			NITIO_Gi_Interrupt_Acknowledge_Reg(counter->counter_index));
	if (ni_tio_get_soft_copy(counter,
			NITIO_Gi_Mode_Reg(counter->counter_index)) &
	    Gi_Loading_On_Gate_Bit) {
		if (gxx_status & Gi_Stale_Data_Bit(counter->counter_index)) {
			if (stale_data)
				*stale_data = 1;
		}
		if (read_register(counter,
			NITIO_Gxx_Joint_Status2_Reg(counter->counter_index)) &
		    Gi_Permanent_Stale_Bit(counter->counter_index)) {
			rt_printk("%s: Gi_Permanent_Stale_Data detected.\n",
				  __FUNCTION__);
			if (perm_stale_data)
				*perm_stale_data = 1;
		}
	}
}
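/*
 * Translate the Gi_Source_Select field of the soft-copied input select
 * register into the generic NI_GPCT_*_CLOCK_SRC value for e-series and
 * m-series boards.  The timebase-3 and next-gate selections are further
 * qualified by the Gi_Source_Subselect bit in the second gate register.
 */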
static unsigned ni_m_series_clock_src_select(const struct ni_gpct *counter)
{
	struct ni_gpct_device *counter_dev = counter->counter_dev;
	const unsigned second_gate_reg =
		NITIO_Gi_Second_Gate_Reg(counter->counter_index);
	unsigned clock_source = 0;
	unsigned i;
	const unsigned input_select =
		(ni_tio_get_soft_copy(counter,
			NITIO_Gi_Input_Select_Reg(counter->counter_index)) &
		 Gi_Source_Select_Mask) >> Gi_Source_Select_Shift;

	switch (input_select) {
	case NI_M_Series_Timebase_1_Clock:
		clock_source = NI_GPCT_TIMEBASE_1_CLOCK_SRC_BITS;
		break;
	case NI_M_Series_Timebase_2_Clock:
		clock_source = NI_GPCT_TIMEBASE_2_CLOCK_SRC_BITS;
		break;
	case NI_M_Series_Timebase_3_Clock:
		if (counter_dev->regs[second_gate_reg] &
		    Gi_Source_Subselect_Bit)
			clock_source =
			    NI_GPCT_ANALOG_TRIGGER_OUT_CLOCK_SRC_BITS;
		else
			clock_source = NI_GPCT_TIMEBASE_3_CLOCK_SRC_BITS;
		break;
	case NI_M_Series_Logic_Low_Clock:
		clock_source = NI_GPCT_LOGIC_LOW_CLOCK_SRC_BITS;
		break;
	case NI_M_Series_Next_Gate_Clock:
		if (counter_dev->regs[second_gate_reg] &
		    Gi_Source_Subselect_Bit)
			clock_source = NI_GPCT_PXI_STAR_TRIGGER_CLOCK_SRC_BITS;
		else
			clock_source = NI_GPCT_NEXT_GATE_CLOCK_SRC_BITS;
		break;
	case NI_M_Series_PXI10_Clock:
		clock_source = NI_GPCT_PXI10_CLOCK_SRC_BITS;
		break;
	case NI_M_Series_Next_TC_Clock:
		clock_source = NI_GPCT_NEXT_TC_CLOCK_SRC_BITS;
		break;
	default:
		for (i = 0; i <= ni_m_series_max_rtsi_channel; ++i) {
			if (input_select == NI_M_Series_RTSI_Clock(i)) {
				clock_source = NI_GPCT_RTSI_CLOCK_SRC_BITS(i);
				break;
			}
		}
		if (i <= ni_m_series_max_rtsi_channel)
			break;
		for (i = 0; i <= ni_m_series_max_pfi_channel; ++i) {
			if (input_select == NI_M_Series_PFI_Clock(i)) {
				clock_source = NI_GPCT_PFI_CLOCK_SRC_BITS(i);
				break;
			}
		}
		if (i <= ni_m_series_max_pfi_channel)
			break;
		BUG();
		break;
	}
	clock_source |= ni_tio_clock_src_modifiers(counter);
	return clock_source;
}
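/*
 * Report the generic gate source (plus CR_INVERT/CR_EDGE flags) currently
 * programmed for the first (gate_index 0) or second (gate_index 1) gate of
 * the counter.  Returns 0 on success or -EINVAL for an unknown gate_index.
 */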
static int ni_tio_get_gate_src(struct ni_gpct *counter, unsigned gate_index,
			       lsampl_t *gate_source)
{
	struct ni_gpct_device *counter_dev = counter->counter_dev;
	const unsigned mode_bits = ni_tio_get_soft_copy(counter,
		NITIO_Gi_Mode_Reg(counter->counter_index));
	const unsigned second_gate_reg =
		NITIO_Gi_Second_Gate_Reg(counter->counter_index);
	unsigned gate_select_bits;

	switch (gate_index) {
	case 0:
		if ((mode_bits & Gi_Gating_Mode_Mask) ==
		    Gi_Gating_Disabled_Bits) {
			*gate_source = NI_GPCT_DISABLED_GATE_SELECT;
			return 0;
		} else {
			gate_select_bits = (ni_tio_get_soft_copy(counter,
				NITIO_Gi_Input_Select_Reg(counter->counter_index)) &
				Gi_Gate_Select_Mask) >> Gi_Gate_Select_Shift;
		}
		switch (counter_dev->variant) {
		case ni_gpct_variant_e_series:
		case ni_gpct_variant_m_series:
			*gate_source =
			    ni_m_series_first_gate_to_generic_gate_source
			    (gate_select_bits);
			break;
		case ni_gpct_variant_660x:
			*gate_source =
			    ni_660x_first_gate_to_generic_gate_source
			    (gate_select_bits);
			break;
		default:
			BUG();
			break;
		}
		if (mode_bits & Gi_Gate_Polarity_Bit)
			*gate_source |= CR_INVERT;
		if ((mode_bits & Gi_Gating_Mode_Mask) != Gi_Level_Gating_Bits)
			*gate_source |= CR_EDGE;
		break;
	case 1:
		if ((mode_bits & Gi_Gating_Mode_Mask) ==
		    Gi_Gating_Disabled_Bits ||
		    (counter_dev->regs[second_gate_reg] &
		     Gi_Second_Gate_Mode_Bit) == 0) {
			*gate_source = NI_GPCT_DISABLED_GATE_SELECT;
			return 0;
		} else {
			gate_select_bits =
			    (counter_dev->regs[second_gate_reg] &
			     Gi_Second_Gate_Select_Mask) >>
			    Gi_Second_Gate_Select_Shift;
		}
		switch (counter_dev->variant) {
		case ni_gpct_variant_e_series:
		case ni_gpct_variant_m_series:
			*gate_source =
			    ni_m_series_second_gate_to_generic_gate_source
			    (gate_select_bits);
			break;
		case ni_gpct_variant_660x:
			*gate_source =
			    ni_660x_second_gate_to_generic_gate_source
			    (gate_select_bits);
			break;
		default:
			BUG();
			break;
		}
		if (counter_dev->regs[second_gate_reg] &
		    Gi_Second_Gate_Polarity_Bit)
			*gate_source |= CR_INVERT;
		/* second gate can't have edge/level mode set independently */
		if ((mode_bits & Gi_Gating_Mode_Mask) != Gi_Level_Gating_Bits)
			*gate_source |= CR_EDGE;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}
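/*
 * Acknowledge any pending gate/TC interrupts for the counter and report
 * gate errors, TC errors, stale data, and permanently stale data back to
 * the caller through the optional output pointers.
 */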
void ni_tio_acknowledge_and_confirm(struct ni_gpct *counter, int *gate_error,
				    int *tc_error, int *perm_stale_data,
				    int *stale_data)
{
	unsigned cidx = counter->counter_index;
	const unsigned short gxx_status = read_register(counter,
						NITIO_SHARED_STATUS_REG(cidx));
	const unsigned short gi_status = read_register(counter,
						NITIO_STATUS_REG(cidx));
	unsigned ack = 0;

	if (gate_error)
		*gate_error = 0;
	if (tc_error)
		*tc_error = 0;
	if (perm_stale_data)
		*perm_stale_data = 0;
	if (stale_data)
		*stale_data = 0;

	if (gxx_status & Gi_Gate_Error_Bit(cidx)) {
		ack |= Gi_Gate_Error_Confirm_Bit(cidx);
		if (gate_error) {
			/*
			 * 660x don't support automatic acknowledgement of
			 * gate interrupt via dma read/write and report bogus
			 * gate errors
			 */
			if (counter->counter_dev->variant !=
			    ni_gpct_variant_660x)
				*gate_error = 1;
		}
	}
	if (gxx_status & Gi_TC_Error_Bit(cidx)) {
		ack |= Gi_TC_Error_Confirm_Bit(cidx);
		if (tc_error)
			*tc_error = 1;
	}
	if (gi_status & Gi_TC_Bit)
		ack |= Gi_TC_Interrupt_Ack_Bit;
	if (gi_status & Gi_Gate_Interrupt_Bit) {
		if (should_ack_gate(counter))
			ack |= Gi_Gate_Interrupt_Ack_Bit;
	}
	if (ack)
		write_register(counter, ack, NITIO_INT_ACK_REG(cidx));
	if (ni_tio_get_soft_copy(counter, NITIO_MODE_REG(cidx)) &
	    Gi_Loading_On_Gate_Bit) {
		if (gxx_status & Gi_Stale_Data_Bit(cidx)) {
			if (stale_data)
				*stale_data = 1;
		}
		if (read_register(counter, NITIO_STATUS2_REG(cidx)) &
		    Gi_Permanent_Stale_Bit(cidx)) {
			dev_info(counter->counter_dev->dev->class_dev,
				 "%s: Gi_Permanent_Stale_Data detected.\n",
				 __func__);
			if (perm_stale_data)
				*perm_stale_data = 1;
		}
	}
}
void ni_tio_acknowledge_and_confirm(struct ni_gpct *counter, int *gate_error,
				    int *tc_error, int *perm_stale_data,
				    int *stale_data)
{
	const unsigned short gxx_status = read_register(counter,
		NITIO_Gxx_Status_Reg(counter->counter_index));
	const unsigned short gi_status = read_register(counter,
		NITIO_Gi_Status_Reg(counter->counter_index));
	unsigned ack = 0;

	if (gate_error)
		*gate_error = 0;
	if (tc_error)
		*tc_error = 0;
	if (perm_stale_data)
		*perm_stale_data = 0;
	if (stale_data)
		*stale_data = 0;

	if (gxx_status & Gi_Gate_Error_Bit(counter->counter_index)) {
		ack |= Gi_Gate_Error_Confirm_Bit(counter->counter_index);
		if (gate_error) {
			/*
			 * 660x don't support automatic acknowledgement of
			 * gate interrupt via dma read/write and report bogus
			 * gate errors
			 */
			if (counter->counter_dev->variant !=
			    ni_gpct_variant_660x)
				*gate_error = 1;
		}
	}
	if (gxx_status & Gi_TC_Error_Bit(counter->counter_index)) {
		ack |= Gi_TC_Error_Confirm_Bit(counter->counter_index);
		if (tc_error)
			*tc_error = 1;
	}
	if (gi_status & Gi_TC_Bit)
		ack |= Gi_TC_Interrupt_Ack_Bit;
	if (gi_status & Gi_Gate_Interrupt_Bit) {
		if (should_ack_gate(counter))
			ack |= Gi_Gate_Interrupt_Ack_Bit;
	}
	if (ack)
		write_register(counter, ack,
			NITIO_Gi_Interrupt_Acknowledge_Reg(counter->counter_index));
	if (ni_tio_get_soft_copy(counter,
			NITIO_Gi_Mode_Reg(counter->counter_index)) &
	    Gi_Loading_On_Gate_Bit) {
		if (gxx_status & Gi_Stale_Data_Bit(counter->counter_index)) {
			if (stale_data)
				*stale_data = 1;
		}
		if (read_register(counter,
			NITIO_Gxx_Joint_Status2_Reg(counter->counter_index)) &
		    Gi_Permanent_Stale_Bit(counter->counter_index)) {
			printk(KERN_INFO
			       "%s: Gi_Permanent_Stale_Data detected.\n",
			       __func__);
			if (perm_stale_data)
				*perm_stale_data = 1;
		}
	}
}