commit 284e21fab2cfcf90dacce565e0b12f29e5df00c1 (patch)
author    Thiago Jung Bauermann <bauerman@linux.ibm.com>  2019-08-06 00:49:17 -0400
committer Michael Ellerman <mpe@ellerman.id.au>           2019-08-09 08:52:08 -0400
tree      f3ff0d324d2c19857a4e842a1c7369e0421e1fc2
parent    e740815a97e2b6d6446792f4328378e66de166d1 (diff)
x86, s390/mm: Move sme_active() and sme_me_mask to x86-specific header
Now that generic code doesn't reference them, move sme_active() and
sme_me_mask to x86's <asm/mem_encrypt.h>.
Also remove the export for sme_active() since it's only used in files that
won't be built as modules. sme_me_mask on the other hand is used in
arch/x86/kvm/svm.c (via __sme_set() and __psp_pa()) which can be built as a
module so its export needs to stay.
Signed-off-by: Thiago Jung Bauermann <bauerman@linux.ibm.com>
Reviewed-by: Christoph Hellwig <hch@lst.de>
Reviewed-by: Tom Lendacky <thomas.lendacky@amd.com>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Link: https://lore.kernel.org/r/20190806044919.10622-5-bauerman@linux.ibm.com
 arch/s390/include/asm/mem_encrypt.h |  4 +---
 arch/x86/include/asm/mem_encrypt.h  | 10 ++++++++++
 arch/x86/mm/mem_encrypt.c           |  1 -
 include/linux/mem_encrypt.h         | 14 +-------------
 4 files changed, 12 insertions(+), 17 deletions(-)
diff --git a/arch/s390/include/asm/mem_encrypt.h b/arch/s390/include/asm/mem_encrypt.h
index 3eb018508190..ff813a56bc30 100644
--- a/arch/s390/include/asm/mem_encrypt.h
+++ b/arch/s390/include/asm/mem_encrypt.h
@@ -4,9 +4,7 @@
 
 #ifndef __ASSEMBLY__
 
-#define sme_me_mask	0ULL
-
-static inline bool sme_active(void) { return false; }
+static inline bool mem_encrypt_active(void) { return false; }
 extern bool sev_active(void);
 
 int set_memory_encrypted(unsigned long addr, int numpages);
diff --git a/arch/x86/include/asm/mem_encrypt.h b/arch/x86/include/asm/mem_encrypt.h
index 0c196c47d621..848ce43b9040 100644
--- a/arch/x86/include/asm/mem_encrypt.h
+++ b/arch/x86/include/asm/mem_encrypt.h
@@ -92,6 +92,16 @@ early_set_memory_encrypted(unsigned long vaddr, unsigned long size) { return 0;
 
 extern char __start_bss_decrypted[], __end_bss_decrypted[], __start_bss_decrypted_unused[];
 
+static inline bool mem_encrypt_active(void)
+{
+	return sme_me_mask;
+}
+
+static inline u64 sme_get_me_mask(void)
+{
+	return sme_me_mask;
+}
+
 #endif	/* __ASSEMBLY__ */
 
 #endif	/* __X86_MEM_ENCRYPT_H__ */
diff --git a/arch/x86/mm/mem_encrypt.c b/arch/x86/mm/mem_encrypt.c
index fece30ca8b0c..94da5a88abe6 100644
--- a/arch/x86/mm/mem_encrypt.c
+++ b/arch/x86/mm/mem_encrypt.c
@@ -344,7 +344,6 @@ bool sme_active(void)
 {
 	return sme_me_mask && !sev_enabled;
 }
-EXPORT_SYMBOL(sme_active);
 
 bool sev_active(void)
 {
diff --git a/include/linux/mem_encrypt.h b/include/linux/mem_encrypt.h
index 470bd53a89df..0c5b0ff9eb29 100644
--- a/include/linux/mem_encrypt.h
+++ b/include/linux/mem_encrypt.h
@@ -18,23 +18,11 @@
 
 #else	/* !CONFIG_ARCH_HAS_MEM_ENCRYPT */
 
-#define sme_me_mask	0ULL
-
-static inline bool sme_active(void) { return false; }
+static inline bool mem_encrypt_active(void) { return false; }
 static inline bool sev_active(void) { return false; }
 
 #endif	/* CONFIG_ARCH_HAS_MEM_ENCRYPT */
 
-static inline bool mem_encrypt_active(void)
-{
-	return sme_me_mask;
-}
-
-static inline u64 sme_get_me_mask(void)
-{
-	return sme_me_mask;
-}
-
 #ifdef CONFIG_AMD_MEM_ENCRYPT
 /*
  * The __sme_set() and __sme_clr() macros are useful for adding or removing