Example #1
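NetBSD/pmax interrupt dispatcher for the MAXINE (Personal DECstation 5000/xx): it decodes the pending-interrupt mask and services the clock tick first.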
static void
dec_maxine_intr(uint32_t status, vaddr_t pc, uint32_t ipending)
{
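	/* CPU interrupt 4 is the halt button; hand control back to the PROM */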
	if (ipending & MIPS_INT_MASK_4)
		prom_haltbutton();

	/* handle clock interrupts ASAP */
	if (ipending & MIPS_INT_MASK_1) {
		struct clockframe cf;

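	/*
	 * Reading the RTC (IOASIC slot 8, register C at offset 48)
	 * acknowledges the periodic interrupt; the lbu into $zero
	 * discards the value.
	 */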
		__asm volatile("lbu $0,48(%0)" ::
			"r"(ioasic_base + IOASIC_SLOT_8_START));
		cf.pc = pc;
		cf.sr = status;
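	/* nonzero when this tick interrupted another interrupt handler */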
		cf.intr = (curcpu()->ci_idepth > 1);
		hardclock(&cf);
		pmax_clock_evcnt.ev_count++;
	}

	if (ipending & MIPS_INT_MASK_3) {
		dec_maxine_ioasic_intr();
	}
	if (ipending & MIPS_INT_MASK_2) {
		kn02ba_errintr();
		pmax_memerr_evcnt.ev_count++;
	}
}
Example #2
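The same pattern on the 3MAX+ (DECstation 5000/240, KN03), with an extra bus-cycle snapshot used to estimate clock-interrupt latency.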
static void
dec_3maxplus_intr(uint32_t status, vaddr_t pc, uint32_t ipending)
{
	unsigned int old_buscycle;

	if (ipending & MIPS_INT_MASK_4)
		prom_haltbutton();

	/* handle clock interrupts ASAP */
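	/*
	 * latched_cycle_cnt holds the free-running cycle count latched at
	 * the clock interrupt; the delta taken after hardclock() feeds the
	 * (disabled) latency check below.
	 */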
	old_buscycle = latched_cycle_cnt;
	if (ipending & MIPS_INT_MASK_1) {
		struct clockframe cf;

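	/* read the RTC (IOASIC slot 8) to acknowledge the tick */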
		__asm volatile("lbu $0,48(%0)" ::
			"r"(ioasic_base + IOASIC_SLOT_8_START));
		cf.pc = pc;
		cf.sr = status;
		cf.intr = (curcpu()->ci_idepth > 1);
		hardclock(&cf);
		pmax_clock_evcnt.ev_count++;
		old_buscycle = latched_cycle_cnt - old_buscycle;
		/* keep clock interrupts enabled when we return */
	}

#ifdef notdef
	/*
	 * Check for late clock interrupts (allow 10% slop). Be careful
	 * to do so only after calling hardclock(), due to logging cost.
	 * Even then, logging dropped ticks just causes more clock
	 * ticks to be missed.
	 */
	if ((ipending & MIPS_INT_MASK_1) && old_buscycle > (tick+49) * 25) {
		/* XXX need to include <sys/msgbuf.h> for msgbufmapped */
		if (msgbufmapped && 0)
			 addlog("kn03: clock intr %d usec late\n",
				 old_buscycle/25);
	}
#endif
	if (ipending & MIPS_INT_MASK_0) {
		dec_3maxplus_ioasic_intr();
	}
	if (ipending & MIPS_INT_MASK_3) {
		dec_3maxplus_errintr();
		pmax_memerr_evcnt.ev_count++;
	}
}
Example #3
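The DECsystem 5100 (KN230) variant: several devices share each CPU interrupt line, so the handler reads the system ICSR and fans out through the CALLINTR() macro.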
static void
dec_5100_intr(uint32_t status, vaddr_t pc, uint32_t ipending)
{
	uint32_t icsr;

	if (ipending & MIPS_INT_MASK_4) {
#ifdef DDB
		Debugger();
#else
		prom_haltbutton();
#endif
	}

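	/*
	 * Latch the KN230 interrupt control/status register; the
	 * CALLINTR() invocations below run a device's handler when
	 * its bit is set in icsr.
	 */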
	icsr = *(volatile uint32_t *)MIPS_PHYS_TO_KSEG1(KN230_SYS_ICSR);

	/* handle clock interrupts ASAP */
	if (ipending & MIPS_INT_MASK_2) {
		struct clockframe cf;

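	/* read the baseboard RTC to acknowledge the tick */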
		__asm volatile("lbu $0,48(%0)" ::
			"r"(MIPS_PHYS_TO_KSEG1(KN01_SYS_CLOCK)));
		cf.pc = pc;
		cf.sr = status;
		cf.intr = (curcpu()->ci_idepth > 1);
		hardclock(&cf);
		pmax_clock_evcnt.ev_count++;
	}

	if (ipending & MIPS_INT_MASK_0) {
		CALLINTR(SYS_DEV_SCC0, KN230_CSR_INTR_DZ0);
		CALLINTR(SYS_DEV_OPT0, KN230_CSR_INTR_OPT0);
		CALLINTR(SYS_DEV_OPT1, KN230_CSR_INTR_OPT1);
	}

	if (ipending & MIPS_INT_MASK_1) {
		CALLINTR(SYS_DEV_LANCE, KN230_CSR_INTR_LANCE);
		CALLINTR(SYS_DEV_SCSI, KN230_CSR_INTR_SII);
	}

	if (ipending & MIPS_INT_MASK_3) {
		dec_5100_memintr();
		pmax_memerr_evcnt.ev_count++;
	}
}