/*
 * exynos4_init_irq - initialize the GIC and interrupt combiners for EXYNOS4.
 *
 * Programs the per-CPU GIC bank stride (4412 banks GIC regions at 0x4000,
 * other EXYNOS4 SoCs at 0x8000 -- presumably per-SoC memory map; verify
 * against the SoC manual), hooks s3c_irq_wake as the GIC wake handler, and
 * cascades the interrupt combiners into their GIC SPIs via COMBINER_MAP.
 */
void __init exynos4_init_irq(void)
{
	int irq;

	/* Per-CPU GIC bank stride differs between 4412 and other EXYNOS4. */
	gic_bank_offset = soc_is_exynos4412() ? 0x4000 : 0x8000;

	gic_init(0, IRQ_PPI_MCT_L, S5P_VA_GIC_DIST, S5P_VA_GIC_CPU);
	/* Route irq_set_wake through the S3C PM wake-source handler. */
	gic_arch_extn.irq_set_wake = s3c_irq_wake;

	/* Combiners common to all EXYNOS4 SoCs. */
	for (irq = 0; irq < COMMON_COMBINER_NR; irq++) {
		combiner_init(irq, (void __iomem *)S5P_VA_COMBINER(irq),
				COMBINER_IRQ(irq, 0));
		combiner_cascade_irq(irq, COMBINER_MAP(irq));
	}

	/* EXYNOS4412 rev 1.0+ provides additional combiner groups. */
	if (soc_is_exynos4412() && (samsung_rev() >= EXYNOS4412_REV_1_0)) {
		for (irq = COMMON_COMBINER_NR; irq < MAX_COMBINER_NR; irq++) {
			combiner_init(irq, (void __iomem *)S5P_VA_COMBINER(irq),
					COMBINER_IRQ(irq, 0));
			combiner_cascade_irq(irq, COMBINER_MAP(irq));
		}
	}

	/*
	 * The parameters of s5p_init_irq() are for VIC init.
	 * These parameters should be NULL and 0 because EXYNOS4
	 * uses GIC instead of VIC.
	 */
	s5p_init_irq(NULL, 0);
}
/*
 * exynos4_init_irq - initialize the GIC and interrupt combiners for EXYNOS4.
 *
 * On non-DT boots the GIC is initialized directly with the per-SoC bank
 * offset; on DT boots initialization is delegated to of_irq_init() using
 * the exynos4_dt_irq_match table. The interrupt combiners are then cascaded
 * one-to-one onto their GIC SPI lines.
 */
void __init exynos4_init_irq(void)
{
	int irq;
	unsigned int gic_bank_offset;

	/* Per-CPU GIC bank stride differs between 4412 and other EXYNOS4. */
	gic_bank_offset = soc_is_exynos4412() ? 0x4000 : 0x8000;

	if (!of_have_populated_dt())
		gic_init_bases(0, IRQ_PPI(0), S5P_VA_GIC_DIST, S5P_VA_GIC_CPU, gic_bank_offset);
#ifdef CONFIG_OF
	else
		/* DT boot: let the matched irqchip driver set up the GIC. */
		of_irq_init(exynos4_dt_irq_match);
#endif

	/* Cascade combiner group N onto GIC SPI N. */
	for (irq = 0; irq < MAX_COMBINER_NR; irq++) {
		combiner_init(irq, (void __iomem *)S5P_VA_COMBINER(irq),
				COMBINER_IRQ(irq, 0));
		combiner_cascade_irq(irq, IRQ_SPI(irq));
	}

	/*
	 * The parameters of s5p_init_irq() are for VIC init.
	 * These parameters should be NULL and 0 because EXYNOS4
	 * uses GIC instead of VIC.
	 */
	s5p_init_irq(NULL, 0);
}
/*
 * combiner_init - set up the interrupt combiner blocks behind a linear
 * irq domain.
 * @combiner_base: base of the memory-mapped combiner register block
 * @np:            device node describing the combiner (parent irq mappings)
 * @max_nr:        number of combiner groups to initialize
 *
 * Allocates per-group chip data, creates one linear irq domain covering
 * all combiner inputs, then initializes and cascades each group onto the
 * parent interrupt parsed from the device tree. On failure the function
 * warns and returns; fix: the chip-data allocation is now released when
 * domain creation fails, instead of being leaked.
 */
static void __init combiner_init(void __iomem *combiner_base,
				 struct device_node *np,
				 unsigned int max_nr)
{
	int i, irq;
	unsigned int nr_irq;
	struct combiner_chip_data *combiner_data;

	/* Each group multiplexes IRQ_IN_COMBINER input lines. */
	nr_irq = max_nr * IRQ_IN_COMBINER;

	combiner_data = kcalloc(max_nr, sizeof (*combiner_data), GFP_KERNEL);
	if (!combiner_data) {
		pr_warning("%s: could not allocate combiner data\n", __func__);
		return;
	}

	combiner_irq_domain = irq_domain_add_linear(np, nr_irq,
				&combiner_irq_domain_ops, combiner_data);
	if (WARN_ON(!combiner_irq_domain)) {
		pr_warning("%s: irq domain init failed\n", __func__);
		/* Don't leak the chip data allocated above. */
		kfree(combiner_data);
		return;
	}

	for (i = 0; i < max_nr; i++) {
		/* Parent (GIC) interrupt for combiner group i, from DT. */
		irq = irq_of_parse_and_map(np, i);
		/* Groups of four share one 0x10-byte register frame. */
		combiner_init_one(&combiner_data[i], i,
				  combiner_base + (i >> 2) * 0x10, irq);
		combiner_cascade_irq(&combiner_data[i], irq);
	}
}
/*
 * exynos4_init_irq - initialize the GIC and interrupt combiners for EXYNOS4.
 *
 * Records the per-SoC GIC bank stride, initializes the GIC, installs the
 * exynos4_gic_irq_fix_base hook for mask/unmask/eoi (adjusting the banked
 * register base), then cascades each combiner group onto its GIC SPI line.
 */
void __init exynos4_init_irq(void)
{
	int grp;

	/* 4412 banks its per-CPU GIC regions at 0x4000; others at 0x8000. */
	gic_bank_offset = soc_is_exynos4412() ? 0x4000 : 0x8000;

	gic_init(0, IRQ_PPI(0), S5P_VA_GIC_DIST, S5P_VA_GIC_CPU);

	/* All three callbacks funnel through the same base-fixup helper. */
	gic_arch_extn.irq_mask = exynos4_gic_irq_fix_base;
	gic_arch_extn.irq_unmask = exynos4_gic_irq_fix_base;
	gic_arch_extn.irq_eoi = exynos4_gic_irq_fix_base;

	/* Combiner group N cascades into GIC SPI N. */
	for (grp = 0; grp < MAX_COMBINER_NR; grp++) {
		combiner_init(grp, (void __iomem *)S5P_VA_COMBINER(grp),
			      COMBINER_IRQ(grp, 0));
		combiner_cascade_irq(grp, IRQ_SPI(grp));
	}

	/*
	 * The parameters of s5p_init_irq() are for VIC init.
	 * These parameters should be NULL and 0 because EXYNOS4
	 * uses GIC instead of VIC.
	 */
	s5p_init_irq(NULL, 0);
}
/*
 * combiner_init - set up the interrupt combiner blocks behind a legacy
 * irq domain.
 * @combiner_base: base of the memory-mapped combiner register block
 * @np:            combiner device node (may be NULL on non-DT boots)
 *
 * Determines the number of combiner groups (from the "samsung,combiner-nr"
 * DT property, or from the SoC type on non-DT boots), allocates a legacy
 * irq domain over all combiner inputs, and cascades every group onto its
 * parent interrupt (DT mapping, or GIC SPI N for group N).
 *
 * Fixes: max_nr was used without being declared, and soc_max_nr was
 * computed (correctly including exynos542x) but never used -- the non-DT
 * fallback previously checked only exynos5250 and would undersize the
 * combiner count on 542x. Both now go through soc_max_nr.
 */
static void __init combiner_init(void __iomem *combiner_base,
				 struct device_node *np)
{
	int i, irq, irq_base;
	unsigned int nr_irq, max_nr, soc_max_nr;

	/* SoC-derived group count, used whenever DT doesn't override it. */
	soc_max_nr = (soc_is_exynos5250() || soc_is_exynos542x()) ?
			EXYNOS5_MAX_COMBINER_NR : EXYNOS4_MAX_COMBINER_NR;

	if (np) {
		if (of_property_read_u32(np, "samsung,combiner-nr", &max_nr)) {
			pr_warning("%s: number of combiners not specified, "
				"setting default as %d.\n",
				__func__, EXYNOS4_MAX_COMBINER_NR);
			max_nr = EXYNOS4_MAX_COMBINER_NR;
		}
	} else {
		max_nr = soc_max_nr;
	}

	nr_irq = max_nr * MAX_IRQ_IN_COMBINER;

	/* Try to reserve a contiguous descriptor range at the legacy base. */
	irq_base = irq_alloc_descs(COMBINER_IRQ(0, 0), 1, nr_irq, 0);
	if (IS_ERR_VALUE(irq_base)) {
		irq_base = COMBINER_IRQ(0, 0);
		pr_warning("%s: irq desc alloc failed. Continuing with %d as linux irq base\n",
			__func__, irq_base);
	}

	combiner_irq_domain = irq_domain_add_legacy(np, nr_irq, irq_base, 0,
				&combiner_irq_domain_ops, &combiner_data);
	if (WARN_ON(!combiner_irq_domain)) {
		pr_warning("%s: irq domain init failed\n", __func__);
		return;
	}

	for (i = 0; i < max_nr; i++) {
		/* Groups of four share one 0x10-byte register frame. */
		combiner_init_one(i, combiner_base + (i >> 2) * 0x10);
		irq = IRQ_SPI(i);
#ifdef CONFIG_OF
		if (np)
			irq = irq_of_parse_and_map(np, i);
#endif
		combiner_cascade_irq(i, irq);
	}

#ifdef CONFIG_PM
	/* Setup suspend/resume combiner saving */
	cpu_pm_register_notifier(&combiner_notifier_block);
#endif
}
/*
 * s5pv310_init_irq - initialize the GIC and interrupt combiners for S5PV310.
 *
 * Selects between the external and internal GIC register banks at build
 * time (CONFIG_USE_EXT_GIC), cascades the interrupt combiners onto their
 * GIC SPIs, and registers s3c_irq_wake as the wake handler on the RTC
 * alarm's irq_chip (which is the shared GIC chip, so this applies to all
 * GIC interrupts).
 */
void __init s5pv310_init_irq(void)
{
	int irq;

#ifdef CONFIG_USE_EXT_GIC
	gic_cpu_base_addr = S5P_VA_EXTGIC_CPU;
	gic_dist_init(0, S5P_VA_EXTGIC_DIST, IRQ_SPI(0));
	gic_cpu_init(0, S5P_VA_EXTGIC_CPU);
#else
	gic_cpu_base_addr = S5P_VA_GIC_CPU;
	gic_dist_init(0, S5P_VA_GIC_DIST, IRQ_SPI(0));
	gic_cpu_init(0, S5P_VA_GIC_CPU);
#endif

	for (irq = 0; irq < MAX_COMBINER_NR; irq++) {

#ifdef CONFIG_CPU_S5PV310_EVT1
		/* From SPI(0) to SPI(39) and SPI(51), SPI(53)
		 * are connected to the interrupt combiner. These irqs
		 * should be initialized to support cascade interrupt.
		 */
		if ((irq >= 40) && !(irq == 51) && !(irq == 53))
			continue;
#endif
#ifdef CONFIG_USE_EXT_GIC
		combiner_init(irq, (void __iomem *)S5P_VA_EXTCOMBINER(irq),
				COMBINER_IRQ(irq, 0));
#else
		combiner_init(irq, (void __iomem *)S5P_VA_COMBINER(irq),
				COMBINER_IRQ(irq, 0));
#endif
		combiner_cascade_irq(irq, IRQ_SPI(irq));
	}

	/* The parameters of s5p_init_irq() are for VIC init.
	 * These parameters should be NULL and 0 because S5PV310
	 * uses GIC instead of VIC.
	 */
	s5p_init_irq(NULL, 0);

	/* Set s3c_irq_wake as set_wake() of GIC irq_chip */
	get_irq_chip(IRQ_RTC_ALARM)->set_wake = s3c_irq_wake;
}
/*
 * s5pv310_init_irq - initialize the GIC and interrupt combiners for S5PV310.
 *
 * Brings up the GIC distributor and CPU interface, then cascades each
 * interrupt combiner group onto its corresponding GIC SPI line.
 */
void __init s5pv310_init_irq(void)
{
	int grp;

	/* GIC must be up before the combiners cascade into it. */
	gic_cpu_base_addr = S5P_VA_GIC_CPU;
	gic_dist_init(0, S5P_VA_GIC_DIST, IRQ_LOCALTIMER);
	gic_cpu_init(0, S5P_VA_GIC_CPU);

	/* Combiner group N cascades into GIC SPI N. */
	for (grp = 0; grp < MAX_COMBINER_NR; grp++) {
		combiner_init(grp, (void __iomem *)S5P_VA_COMBINER(grp),
			      COMBINER_IRQ(grp, 0));
		combiner_cascade_irq(grp, IRQ_SPI(grp));
	}

	/* The parameters of s5p_init_irq() are for VIC init.
	 * These parameters should be NULL and 0 because S5PV310
	 * uses GIC instead of VIC.
	 */
	s5p_init_irq(NULL, 0);
}