/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Internal header to deal with irq_desc->status which will be renamed
 * to irq_desc->settings.
 */
enum {
	_IRQ_DEFAULT_INIT_FLAGS	= IRQ_DEFAULT_INIT_FLAGS,
	_IRQ_PER_CPU		= IRQ_PER_CPU,
	_IRQ_LEVEL		= IRQ_LEVEL,
	_IRQ_NOPROBE		= IRQ_NOPROBE,
	_IRQ_NOREQUEST		= IRQ_NOREQUEST,
	_IRQ_NOTHREAD		= IRQ_NOTHREAD,
	_IRQ_NOAUTOEN		= IRQ_NOAUTOEN,
	_IRQ_MOVE_PCNTXT	= IRQ_MOVE_PCNTXT,
	_IRQ_NO_BALANCING	= IRQ_NO_BALANCING,
	_IRQ_NESTED_THREAD	= IRQ_NESTED_THREAD,
	_IRQ_PER_CPU_DEVID	= IRQ_PER_CPU_DEVID,
	_IRQ_IS_POLLED		= IRQ_IS_POLLED,
	_IRQ_DISABLE_UNLAZY	= IRQ_DISABLE_UNLAZY,
	_IRQ_HIDDEN		= IRQ_HIDDEN,
	_IRQF_MODIFY_MASK	= IRQF_MODIFY_MASK,
};

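/*
 * Poison the raw flag names: code that includes this header and touches
 * the IRQ_* flags directly, instead of going through the accessor
 * helpers below, hits the undefined GOT_YOU_MORON identifier and fails
 * to compile.
 */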
#define IRQ_PER_CPU		GOT_YOU_MORON
#define IRQ_NO_BALANCING	GOT_YOU_MORON
#define IRQ_LEVEL		GOT_YOU_MORON
#define IRQ_NOPROBE		GOT_YOU_MORON
#define IRQ_NOREQUEST		GOT_YOU_MORON
#define IRQ_NOTHREAD		GOT_YOU_MORON
#define IRQ_NOAUTOEN		GOT_YOU_MORON
#define IRQ_NESTED_THREAD	GOT_YOU_MORON
#define IRQ_PER_CPU_DEVID	GOT_YOU_MORON
#define IRQ_IS_POLLED		GOT_YOU_MORON
#define IRQ_DISABLE_UNLAZY	GOT_YOU_MORON
#define IRQ_HIDDEN		GOT_YOU_MORON
#undef IRQF_MODIFY_MASK
#define IRQF_MODIFY_MASK	GOT_YOU_MORON

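/*
 * The helpers below read and modify desc->status_use_accessors, so the
 * settings bits are only reachable through this header.
 */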
static inline void
irq_settings_clr_and_set(struct irq_desc *desc, u32 clr, u32 set)
{
	desc->status_use_accessors &= ~(clr & _IRQF_MODIFY_MASK);
	desc->status_use_accessors |= (set & _IRQF_MODIFY_MASK);
}

static inline bool irq_settings_is_per_cpu(struct irq_desc *desc)
{
	return desc->status_use_accessors & _IRQ_PER_CPU;
}

static inline bool irq_settings_is_per_cpu_devid(struct irq_desc *desc)
{
	return desc->status_use_accessors & _IRQ_PER_CPU_DEVID;
}

static inline void irq_settings_set_per_cpu(struct irq_desc *desc)
{
	desc->status_use_accessors |= _IRQ_PER_CPU;
}

static inline void irq_settings_set_no_balancing(struct irq_desc *desc)
{
	desc->status_use_accessors |= _IRQ_NO_BALANCING;
}

static inline bool irq_settings_has_no_balance_set(struct irq_desc *desc)
{
	return desc->status_use_accessors & _IRQ_NO_BALANCING;
}

static inline u32 irq_settings_get_trigger_mask(struct irq_desc *desc)
{
	return desc->status_use_accessors & IRQ_TYPE_SENSE_MASK;
}

static inline void
irq_settings_set_trigger_mask(struct irq_desc *desc, u32 mask)
{
	desc->status_use_accessors &= ~IRQ_TYPE_SENSE_MASK;
	desc->status_use_accessors |= mask & IRQ_TYPE_SENSE_MASK;
}

static inline bool irq_settings_is_level(struct irq_desc *desc)
{
	return desc->status_use_accessors & _IRQ_LEVEL;
}

static inline void irq_settings_clr_level(struct irq_desc *desc)
{
	desc->status_use_accessors &= ~_IRQ_LEVEL;
}

static inline void irq_settings_set_level(struct irq_desc *desc)
{
	desc->status_use_accessors |= _IRQ_LEVEL;
}

static inline bool irq_settings_can_request(struct irq_desc *desc)
{
	return !(desc->status_use_accessors & _IRQ_NOREQUEST);
}

static inline void irq_settings_clr_norequest(struct irq_desc *desc)
{
	desc->status_use_accessors &= ~_IRQ_NOREQUEST;
}

static inline void irq_settings_set_norequest(struct irq_desc *desc)
{
	desc->status_use_accessors |= _IRQ_NOREQUEST;
}

static inline bool irq_settings_can_thread(struct irq_desc *desc)
{
	return !(desc->status_use_accessors & _IRQ_NOTHREAD);
}

static inline void irq_settings_clr_nothread(struct irq_desc *desc)
{
	desc->status_use_accessors &= ~_IRQ_NOTHREAD;
}

static inline void irq_settings_set_nothread(struct irq_desc *desc)
{
	desc->status_use_accessors |= _IRQ_NOTHREAD;
}

static inline bool irq_settings_can_probe(struct irq_desc *desc)
{
	return !(desc->status_use_accessors & _IRQ_NOPROBE);
}

static inline void irq_settings_clr_noprobe(struct irq_desc *desc)
{
	desc->status_use_accessors &= ~_IRQ_NOPROBE;
}

static inline void irq_settings_set_noprobe(struct irq_desc *desc)
{
	desc->status_use_accessors |= _IRQ_NOPROBE;
}

static inline bool irq_settings_can_move_pcntxt(struct irq_desc *desc)
{
	return desc->status_use_accessors & _IRQ_MOVE_PCNTXT;
}

static inline bool irq_settings_can_autoenable(struct irq_desc *desc)
{
	return !(desc->status_use_accessors & _IRQ_NOAUTOEN);
}

static inline bool irq_settings_is_nested_thread(struct irq_desc *desc)
{
	return desc->status_use_accessors & _IRQ_NESTED_THREAD;
}

static inline bool irq_settings_is_polled(struct irq_desc *desc)
{
	return desc->status_use_accessors & _IRQ_IS_POLLED;
}

static inline bool irq_settings_disable_unlazy(struct irq_desc *desc)
{
	return desc->status_use_accessors & _IRQ_DISABLE_UNLAZY;
}

static inline void irq_settings_clr_disable_unlazy(struct irq_desc *desc)
{
	desc->status_use_accessors &= ~_IRQ_DISABLE_UNLAZY;
}

static inline bool irq_settings_is_hidden(struct irq_desc *desc)
{
	return desc->status_use_accessors & _IRQ_HIDDEN;
}
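
/*
 * Illustrative sketch only, not part of this header: a typical caller
 * such as irq_modify_status() in kernel/irq/chip.c updates the settings
 * through these helpers while holding the descriptor lock, roughly:
 *
 *	raw_spin_lock_irqsave(&desc->lock, flags);
 *	irq_settings_clr_and_set(desc, clr, set);
 *	if (irq_settings_is_level(desc))
 *		irqd_set(&desc->irq_data, IRQD_LEVEL);
 *	raw_spin_unlock_irqrestore(&desc->lock, flags);
 */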