/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Internal header to deal with irq_desc->status, which will be renamed
 * to irq_desc->settings. Until that rename happens the field lives in
 * irq_desc as status_use_accessors and must only be manipulated through
 * the helpers in this file.
 */
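/* Internal copies of the flag bits, so the public IRQ_* names can be poisoned below. */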
enum {
	_IRQ_DEFAULT_INIT_FLAGS	= IRQ_DEFAULT_INIT_FLAGS,
	_IRQ_PER_CPU		= IRQ_PER_CPU,
	_IRQ_LEVEL		= IRQ_LEVEL,
	_IRQ_NOPROBE		= IRQ_NOPROBE,
	_IRQ_NOREQUEST		= IRQ_NOREQUEST,
	_IRQ_NOTHREAD		= IRQ_NOTHREAD,
	_IRQ_NOAUTOEN		= IRQ_NOAUTOEN,
	_IRQ_MOVE_PCNTXT	= IRQ_MOVE_PCNTXT,
	_IRQ_NO_BALANCING	= IRQ_NO_BALANCING,
	_IRQ_NESTED_THREAD	= IRQ_NESTED_THREAD,
	_IRQ_PER_CPU_DEVID	= IRQ_PER_CPU_DEVID,
	_IRQ_IS_POLLED		= IRQ_IS_POLLED,
	_IRQ_DISABLE_UNLAZY	= IRQ_DISABLE_UNLAZY,
	_IRQF_MODIFY_MASK	= IRQF_MODIFY_MASK,
};

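/*
 * Poison the bare flag names so that no code can fiddle with the raw
 * status bits directly: everything has to go through the accessor
 * functions below.
 */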
#define IRQ_PER_CPU		GOT_YOU_MORON
#define IRQ_NO_BALANCING	GOT_YOU_MORON
#define IRQ_LEVEL		GOT_YOU_MORON
#define IRQ_NOPROBE		GOT_YOU_MORON
#define IRQ_NOREQUEST		GOT_YOU_MORON
#define IRQ_NOTHREAD		GOT_YOU_MORON
#define IRQ_NOAUTOEN		GOT_YOU_MORON
#define IRQ_NESTED_THREAD	GOT_YOU_MORON
#define IRQ_PER_CPU_DEVID	GOT_YOU_MORON
#define IRQ_IS_POLLED		GOT_YOU_MORON
#define IRQ_DISABLE_UNLAZY	GOT_YOU_MORON
#undef IRQF_MODIFY_MASK
#define IRQF_MODIFY_MASK	GOT_YOU_MORON

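/* Clear and set flag bits, restricted to the bits callers may modify (IRQF_MODIFY_MASK). */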
static inline void
irq_settings_clr_and_set(struct irq_desc *desc, u32 clr, u32 set)
{
	desc->status_use_accessors &= ~(clr & _IRQF_MODIFY_MASK);
	desc->status_use_accessors |= (set & _IRQF_MODIFY_MASK);
}

static inline bool irq_settings_is_per_cpu(struct irq_desc *desc)
{
	return desc->status_use_accessors & _IRQ_PER_CPU;
}

static inline bool irq_settings_is_per_cpu_devid(struct irq_desc *desc)
{
	return desc->status_use_accessors & _IRQ_PER_CPU_DEVID;
}

static inline void irq_settings_set_per_cpu(struct irq_desc *desc)
{
	desc->status_use_accessors |= _IRQ_PER_CPU;
}

static inline void irq_settings_set_no_balancing(struct irq_desc *desc)
{
	desc->status_use_accessors |= _IRQ_NO_BALANCING;
}

static inline bool irq_settings_has_no_balance_set(struct irq_desc *desc)
{
	return desc->status_use_accessors & _IRQ_NO_BALANCING;
}

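/* The trigger type (edge/level and polarity) is kept in the IRQ_TYPE_SENSE_MASK bits. */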
static inline u32 irq_settings_get_trigger_mask(struct irq_desc *desc)
{
	return desc->status_use_accessors & IRQ_TYPE_SENSE_MASK;
}

static inline void
irq_settings_set_trigger_mask(struct irq_desc *desc, u32 mask)
{
	desc->status_use_accessors &= ~IRQ_TYPE_SENSE_MASK;
	desc->status_use_accessors |= mask & IRQ_TYPE_SENSE_MASK;
}

static inline bool irq_settings_is_level(struct irq_desc *desc)
{
	return desc->status_use_accessors & _IRQ_LEVEL;
}

static inline void irq_settings_clr_level(struct irq_desc *desc)
{
	desc->status_use_accessors &= ~_IRQ_LEVEL;
}

static inline void irq_settings_set_level(struct irq_desc *desc)
{
	desc->status_use_accessors |= _IRQ_LEVEL;
}

static inline bool irq_settings_can_request(struct irq_desc *desc)
{
	return !(desc->status_use_accessors & _IRQ_NOREQUEST);
}

static inline void irq_settings_clr_norequest(struct irq_desc *desc)
{
	desc->status_use_accessors &= ~_IRQ_NOREQUEST;
}

static inline void irq_settings_set_norequest(struct irq_desc *desc)
{
	desc->status_use_accessors |= _IRQ_NOREQUEST;
}

static inline bool irq_settings_can_thread(struct irq_desc *desc)
{
	return !(desc->status_use_accessors & _IRQ_NOTHREAD);
}

static inline void irq_settings_clr_nothread(struct irq_desc *desc)
{
	desc->status_use_accessors &= ~_IRQ_NOTHREAD;
}

static inline void irq_settings_set_nothread(struct irq_desc *desc)
{
	desc->status_use_accessors |= _IRQ_NOTHREAD;
}

static inline bool irq_settings_can_probe(struct irq_desc *desc)
{
	return !(desc->status_use_accessors & _IRQ_NOPROBE);
}

static inline void irq_settings_clr_noprobe(struct irq_desc *desc)
{
	desc->status_use_accessors &= ~_IRQ_NOPROBE;
}

static inline void irq_settings_set_noprobe(struct irq_desc *desc)
{
	desc->status_use_accessors |= _IRQ_NOPROBE;
}

static inline bool irq_settings_can_move_pcntxt(struct irq_desc *desc)
{
	return desc->status_use_accessors & _IRQ_MOVE_PCNTXT;
}

static inline bool irq_settings_can_autoenable(struct irq_desc *desc)
{
	return !(desc->status_use_accessors & _IRQ_NOAUTOEN);
}

static inline bool irq_settings_is_nested_thread(struct irq_desc *desc)
{
	return desc->status_use_accessors & _IRQ_NESTED_THREAD;
}

static inline bool irq_settings_is_polled(struct irq_desc *desc)
{
	return desc->status_use_accessors & _IRQ_IS_POLLED;
}

static inline bool irq_settings_disable_unlazy(struct irq_desc *desc)
{
	return desc->status_use_accessors & _IRQ_DISABLE_UNLAZY;
}

static inline void irq_settings_clr_disable_unlazy(struct irq_desc *desc)
{
	desc->status_use_accessors &= ~_IRQ_DISABLE_UNLAZY;
}
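
/*
 * Usage sketch, not part of this header: the request path in
 * kernel/irq/manage.c consults these helpers before accepting a
 * handler, roughly along the lines of:
 *
 *	if (!irq_settings_can_request(desc) ||
 *	    WARN_ON(irq_settings_is_per_cpu_devid(desc)))
 *		return -EINVAL;
 */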