/*
* lockdep: we want to handle all irq_desc locks as a single lock-class:
*/
-struct lock_class_key irq_desc_lock_class;
+static struct lock_class_key irq_desc_lock_class;
#if defined(CONFIG_SMP) && defined(CONFIG_GENERIC_HARDIRQS)
static void __init init_irq_default_affinity(void)
int nr_irqs = NR_IRQS;
EXPORT_SYMBOL_GPL(nr_irqs);
-DEFINE_RAW_SPINLOCK(sparse_irq_lock);
+static DEFINE_MUTEX(sparse_irq_lock);
static DECLARE_BITMAP(allocated_irqs, NR_IRQS);
#ifdef CONFIG_SPARSE_IRQ
/*
 * NOTE(review): patch removal hunk — every line below is '-' (deleted code).
 * init_kstat_irqs() allocated the per-IRQ statistics array with
 * kzalloc_node(GFP_ATOMIC) and deliberately kept the old array on
 * allocation failure.  The patch deletes the function entirely;
 * presumably kstat_irqs allocation moves into alloc_desc() — confirm
 * against the full patch.
 */
-void __ref init_kstat_irqs(struct irq_desc *desc, int node, int nr)
-{
- void *ptr;
-
- ptr = kzalloc_node(nr * sizeof(*desc->kstat_irqs),
- GFP_ATOMIC, node);
-
- /*
- * don't overwrite if we can not get a new one
- * init_copy_kstat_irqs() could still use old one
- */
- if (ptr) {
- printk(KERN_DEBUG " alloc kstat_irqs on node %d\n", node);
- desc->kstat_irqs = ptr;
- }
-}
-
-static RADIX_TREE(irq_desc_tree, GFP_ATOMIC);
+static RADIX_TREE(irq_desc_tree, GFP_KERNEL);
/*
 * NOTE(review): this hunk is garbled by extraction — the body shown
 * (radix_tree_lookup + return) belongs to irq_to_desc(); a void
 * irq_insert_desc() would call radix_tree_insert().  The intervening
 * context lines were lost; do not read this as a real void-with-return.
 */
static void irq_insert_desc(unsigned int irq, struct irq_desc *desc)
{
return radix_tree_lookup(&irq_desc_tree, irq);
}
/*
 * NOTE(review): patch removal hunk.  replace_irq_desc() swapped the
 * descriptor pointer stored in the radix tree slot for @irq (no-op if
 * the slot does not exist).  Deleted by the patch along with its last
 * caller — confirm no remaining users in the full tree.
 */
-void replace_irq_desc(unsigned int irq, struct irq_desc *desc)
-{
- void **ptr;
-
- ptr = radix_tree_lookup_slot(&irq_desc_tree, irq);
- if (ptr)
- radix_tree_replace_slot(ptr, desc);
-}
-
/*
 * NOTE(review): two hunks squashed together.  delete_irq_desc() removes
 * the descriptor for @irq from the radix tree; the #ifdef tail below
 * (pending_mask / affinity freeing) actually belongs to free_masks(),
 * whose opening lines were lost in extraction.  The -/+ pair reflects
 * the patch moving 'affinity' into struct irq_data (desc->irq_data.affinity).
 */
static void delete_irq_desc(unsigned int irq)
{
radix_tree_delete(&irq_desc_tree, irq);
#ifdef CONFIG_GENERIC_PENDING_IRQ
free_cpumask_var(desc->pending_mask);
#endif
- free_cpumask_var(desc->affinity);
+ free_cpumask_var(desc->irq_data.affinity);
}
#else
/* !CONFIG_SMP stub: per-desc cpumasks are not allocated, nothing to free. */
static inline void free_masks(struct irq_desc *desc) { }
/*
 * NOTE(review): truncated hunk — the body past the NULL check is
 * missing.  The patch drops the boot-time GFP_NOWAIT/GFP_ATOMIC hack in
 * favor of plain GFP_KERNEL, which is only valid because all callers
 * now run in sleepable context (sparse_irq_lock became a mutex).
 */
static struct irq_desc *alloc_desc(int irq, int node)
{
- /* Temporary hack until we can switch to GFP_KERNEL */
- gfp_t gfp = gfp_allowed_mask == GFP_BOOT_MASK ? GFP_NOWAIT : GFP_ATOMIC;
struct irq_desc *desc;
+ gfp_t gfp = GFP_KERNEL;
desc = kzalloc_node(sizeof(*desc), gfp, node);
if (!desc)
/*
 * NOTE(review): truncated hunk — the closing lines (freeing desc itself)
 * are missing.  free_desc() tears down one descriptor: unregister its
 * /proc entry, drop it from the radix tree under sparse_irq_lock, then
 * release masks and kstat_irqs.  The -/+ pairs convert the raw
 * spinlock + irqsave to a plain mutex (hence 'flags' is deleted),
 * which requires sleepable context — consistent with GFP_KERNEL above.
 */
static void free_desc(unsigned int irq)
{
struct irq_desc *desc = irq_to_desc(irq);
- unsigned long flags;
unregister_irq_proc(irq, desc);
- raw_spin_lock_irqsave(&sparse_irq_lock, flags);
+ mutex_lock(&sparse_irq_lock);
delete_irq_desc(irq);
- raw_spin_unlock_irqrestore(&sparse_irq_lock, flags);
+ mutex_unlock(&sparse_irq_lock);
free_masks(desc);
kfree(desc->kstat_irqs);
/*
 * NOTE(review): truncated hunk — the 'err:' label line between
 * 'return start;' and the unwind loop was lost in extraction.
 * alloc_descs() allocates @cnt descriptors for irqs [start, start+cnt)
 * on @node and publishes each in the radix tree under sparse_irq_lock.
 * On allocation failure it frees what it already inserted and clears
 * the reservation bits, returning -ENOMEM.  The -/+ pairs are the same
 * spinlock-to-mutex conversion as in free_desc().
 */
static int alloc_descs(unsigned int start, unsigned int cnt, int node)
{
struct irq_desc *desc;
- unsigned long flags;
int i;
for (i = 0; i < cnt; i++) {
desc = alloc_desc(start + i, node);
if (!desc)
goto err;
- raw_spin_lock_irqsave(&sparse_irq_lock, flags);
+ mutex_lock(&sparse_irq_lock);
irq_insert_desc(start + i, desc);
- raw_spin_unlock_irqrestore(&sparse_irq_lock, flags);
+ mutex_unlock(&sparse_irq_lock);
}
return start;
for (i--; i >= 0; i--)
free_desc(start + i);
- raw_spin_lock_irqsave(&sparse_irq_lock, flags);
+ mutex_lock(&sparse_irq_lock);
bitmap_clear(allocated_irqs, start, cnt);
- raw_spin_unlock_irqrestore(&sparse_irq_lock, flags);
+ mutex_unlock(&sparse_irq_lock);
return -ENOMEM;
}
*/
/*
 * NOTE(review): truncated hunk — the body of the range-check 'if'
 * (an early return) is missing.  irq_free_descs() frees @cnt
 * descriptors starting at @from and then clears their bits in
 * allocated_irqs under sparse_irq_lock.  Same lock conversion
 * as the other hunks.
 */
void irq_free_descs(unsigned int from, unsigned int cnt)
{
- unsigned long flags;
int i;
if (from >= nr_irqs || (from + cnt) > nr_irqs)
for (i = 0; i < cnt; i++)
free_desc(from + i);
- raw_spin_lock_irqsave(&sparse_irq_lock, flags);
+ mutex_lock(&sparse_irq_lock);
bitmap_clear(allocated_irqs, from, cnt);
- raw_spin_unlock_irqrestore(&sparse_irq_lock, flags);
+ mutex_unlock(&sparse_irq_lock);
}
/**
/*
 * NOTE(review): truncated hunk — the check between
 * bitmap_find_next_zero_area() and 'goto err' (presumably verifying a
 * specific 'start' was honored) is missing.  irq_alloc_descs() finds a
 * run of @cnt free IRQ numbers at/after @from, marks them allocated in
 * the bitmap, drops the lock, and delegates descriptor allocation to
 * alloc_descs().  Returns the first irq number, -EINVAL for cnt==0, or
 * -EEXIST when the requested range is taken.  Note the lock is
 * deliberately released before alloc_descs() — the bitmap reservation
 * keeps the range from being claimed concurrently.
 */
int __ref
irq_alloc_descs(int irq, unsigned int from, unsigned int cnt, int node)
{
- unsigned long flags;
int start, ret;
if (!cnt)
return -EINVAL;
- raw_spin_lock_irqsave(&sparse_irq_lock, flags);
+ mutex_lock(&sparse_irq_lock);
start = bitmap_find_next_zero_area(allocated_irqs, nr_irqs, from, cnt, 0);
ret = -EEXIST;
goto err;
bitmap_set(allocated_irqs, start, cnt);
- raw_spin_unlock_irqrestore(&sparse_irq_lock, flags);
+ mutex_unlock(&sparse_irq_lock);
return alloc_descs(start, cnt, node);
err:
- raw_spin_unlock_irqrestore(&sparse_irq_lock, flags);
+ mutex_unlock(&sparse_irq_lock);
return ret;
}
*/
/*
 * irq_reserve_irqs - mark a range of IRQ numbers as allocated without
 * creating descriptors.
 *
 * Reserves @cnt irqs starting exactly at @from by setting their bits in
 * allocated_irqs.  Returns 0 on success, -EINVAL when @cnt is zero or
 * the range exceeds nr_irqs, and -EEXIST when any irq in the range is
 * already taken (bitmap_find_next_zero_area() returned a start other
 * than @from, so nothing is set).  The -/+ pairs below are the patch's
 * raw-spinlock-to-mutex conversion of sparse_irq_lock; with a mutex,
 * no irq flags need saving, hence 'flags' is removed.
 */
int irq_reserve_irqs(unsigned int from, unsigned int cnt)
{
- unsigned long flags;
unsigned int start;
int ret = 0;
if (!cnt || (from + cnt) > nr_irqs)
return -EINVAL;
- raw_spin_lock_irqsave(&sparse_irq_lock, flags);
+ mutex_lock(&sparse_irq_lock);
start = bitmap_find_next_zero_area(allocated_irqs, nr_irqs, from, cnt, 0);
if (start == from)
bitmap_set(allocated_irqs, start, cnt);
else
ret = -EEXIST;
- raw_spin_unlock_irqrestore(&sparse_irq_lock, flags);
+ mutex_unlock(&sparse_irq_lock);
return ret;
}