// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com>
 */

/*
 * This file reads all the special sections which contain alternate
 * instructions that can be patched in or redirected to at runtime.
 */

#include <stdlib.h>
#include <string.h>

#include "builtin.h"
#include "special.h"
#include "warn.h"

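/*
 * Each __ex_table entry is assumed to match the kernel's
 * struct exception_table_entry (three 32-bit fields: insn, fixup, handler);
 * only the insn and fixup relocations are needed here.
 */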
#define EX_ENTRY_SIZE		12
#define EX_ORIG_OFFSET		0
#define EX_NEW_OFFSET		4

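/*
 * __jump_table entries are assumed to use the relative struct jump_entry
 * layout: 32-bit code and target offsets followed by a pointer-sized key,
 * 16 bytes total on x86-64.
 */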
#define JUMP_ENTRY_SIZE		16
#define JUMP_ORIG_OFFSET	0
#define JUMP_NEW_OFFSET		4

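/*
 * .altinstructions entries are assumed to follow struct alt_instr: 32-bit
 * original and replacement offsets, a 16-bit CPU feature word, and one-byte
 * original/replacement lengths.  The trailing byte (presumably padlen) is
 * not needed here.
 */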
#define ALT_ENTRY_SIZE		13
#define ALT_ORIG_OFFSET		0
#define ALT_NEW_OFFSET		4
#define ALT_FEATURE_OFFSET	8
#define ALT_ORIG_LEN_OFFSET	10
#define ALT_NEW_LEN_OFFSET	11

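/* CPU feature numbers are encoded as (word * 32 + bit), as in cpufeatures.h */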
#define X86_FEATURE_POPCNT (4*32+23)
#define X86_FEATURE_SMAP (9*32+20)

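/*
 * Describes how to parse one entry of a given special section: 'size' is the
 * entry size in bytes, the remaining fields are byte offsets within an entry.
 */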
struct special_entry {
	const char *sec;
	bool group, jump_or_nop;
	unsigned char size, orig, new;
	unsigned char orig_len, new_len; /* group only */
	unsigned char feature; /* ALTERNATIVE macro CPU feature */
};

struct special_entry entries[] = {
	{
		.sec = ".altinstructions",
		.group = true,
		.size = ALT_ENTRY_SIZE,
		.orig = ALT_ORIG_OFFSET,
		.orig_len = ALT_ORIG_LEN_OFFSET,
		.new = ALT_NEW_OFFSET,
		.new_len = ALT_NEW_LEN_OFFSET,
		.feature = ALT_FEATURE_OFFSET,
	},
	{
		.sec = "__jump_table",
		.jump_or_nop = true,
		.size = JUMP_ENTRY_SIZE,
		.orig = JUMP_ORIG_OFFSET,
		.new = JUMP_NEW_OFFSET,
	},
	{
		.sec = "__ex_table",
		.size = EX_ENTRY_SIZE,
		.orig = EX_ORIG_OFFSET,
		.new = EX_NEW_OFFSET,
	},
	{},
};

static int get_alt_entry(struct elf *elf, struct special_entry *entry,
			 struct section *sec, int idx,
			 struct special_alt *alt)
{
	struct reloc *orig_reloc, *new_reloc;
	unsigned long offset;

	offset = idx * entry->size;

	alt->group = entry->group;
	alt->jump_or_nop = entry->jump_or_nop;

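	/*
	 * For grouped alternatives, the original and replacement instruction
	 * lengths are plain bytes in the entry rather than relocations.
	 */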
	if (alt->group) {
		alt->orig_len = *(unsigned char *)(sec->data->d_buf + offset +
						   entry->orig_len);
		alt->new_len = *(unsigned char *)(sec->data->d_buf + offset +
						  entry->new_len);
	}

	if (entry->feature) {
		unsigned short feature;

		feature = *(unsigned short *)(sec->data->d_buf + offset +
					      entry->feature);

		/*
		 * It has been requested that we don't validate the !POPCNT
		 * feature path which is a "very very small percentage of
		 * machines".
		 */
		if (feature == X86_FEATURE_POPCNT)
			alt->skip_orig = true;

		/*
		 * If UACCESS validation is enabled, force that alternative;
		 * otherwise force it the other way.
		 *
		 * What we want to avoid is having both the original and the
		 * alternative code flow at the same time: in that case we
		 * can find paths that see the STAC but take the NOP instead
		 * of the CLAC, and the other way around.
		 */
		if (feature == X86_FEATURE_SMAP) {
			if (uaccess)
				alt->skip_orig = true;
			else
				alt->skip_alt = true;
		}
	}

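	/*
	 * The location of the original instruction is referenced via a
	 * section-relative relocation; its addend gives the offset.
	 */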
	orig_reloc = find_reloc_by_dest(elf, sec, offset + entry->orig);
	if (!orig_reloc) {
		WARN_FUNC("can't find orig reloc", sec, offset + entry->orig);
		return -1;
	}
	if (orig_reloc->sym->type != STT_SECTION) {
		WARN_FUNC("don't know how to handle non-section reloc symbol %s",
			  sec, offset + entry->orig, orig_reloc->sym->name);
		return -1;
	}

	alt->orig_sec = orig_reloc->sym->sec;
	alt->orig_off = orig_reloc->addend;

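	/*
	 * A group entry with an empty replacement (new_len == 0) has no
	 * replacement code and therefore no "new" relocation to look up.
	 */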
	if (!entry->group || alt->new_len) {
		new_reloc = find_reloc_by_dest(elf, sec, offset + entry->new);
		if (!new_reloc) {
			WARN_FUNC("can't find new reloc",
				  sec, offset + entry->new);
			return -1;
		}

		alt->new_sec = new_reloc->sym->sec;
		alt->new_off = (unsigned int)new_reloc->addend;

		/*
		 * _ASM_EXTABLE_EX hack: undo the 0x7ffffff0 bias applied to
		 * the fixup offset to recover the real fixup address.
		 */
		if (alt->new_off >= 0x7ffffff0)
			alt->new_off -= 0x7ffffff0;
	}

	return 0;
}

/*
 * Read all the special sections and create a list of special_alt structs which
 * describe all the alternate instructions that can be patched in or
 * redirected to at runtime.
 */
int special_get_alts(struct elf *elf, struct list_head *alts)
{
	struct special_entry *entry;
	struct section *sec;
	unsigned int nr_entries;
	struct special_alt *alt;
	int idx, ret;

	INIT_LIST_HEAD(alts);

	for (entry = entries; entry->sec; entry++) {
		sec = find_section_by_name(elf, entry->sec);
		if (!sec)
			continue;

		if (sec->len % entry->size != 0) {
			WARN("%s size not a multiple of %d",
			     sec->name, entry->size);
			return -1;
		}

		nr_entries = sec->len / entry->size;

		for (idx = 0; idx < nr_entries; idx++) {
			alt = malloc(sizeof(*alt));
			if (!alt) {
				WARN("malloc failed");
				return -1;
			}
			memset(alt, 0, sizeof(*alt));

			ret = get_alt_entry(elf, entry, sec, idx, alt);
			if (ret) {
				free(alt);
				return ret;
			}

			list_add_tail(&alt->list, alts);
		}
	}

	return 0;
}