File 5a4fd894-1-x86-rearrange-RESTORE_ALL-to-restore-in-stack-order.patch of Package xen.7317
# Commit f85d105e27735f0e20aa30d77f03774f3ed55ae5
# Date 2018-01-05 19:57:08 +0000
# Author Andrew Cooper <andrew.cooper3@citrix.com>
# Committer Andrew Cooper <andrew.cooper3@citrix.com>
x86/entry: Rearrange RESTORE_ALL to restore registers in stack order

Results in a more predictable (i.e. linear) memory access pattern.

No functional change.

This is part of XSA-254.
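
As an illustrative aside (not part of the change itself, and assuming the
conventional struct cpu_user_regs layout, in which r15 sits at the lowest
stack offset and the C-clobbered registers live above rbx), the non-compat
restore sequence after this rearrangement reads roughly:

    movq UREGS_r15(%rsp),%r15    # lowest offset, restored first
    movq UREGS_r14(%rsp),%r14
    movq UREGS_r13(%rsp),%r13
    movq UREGS_r12(%rsp),%r12
    movq UREGS_rbp(%rsp),%rbp
    movq UREGS_rbx(%rsp),%rbx
    LOAD_C_CLOBBERED \compat     # C-clobbered regs (r11-r8, rax, rcx, rdx,
                                 # rsi, rdi) at the higher offsets
    subq $-(UREGS_error_code-UREGS_r15+\adj), %rsp  # drop the GPR frame

Previously LOAD_C_CLOBBERED ran first, so the loads jumped to the higher
offsets before walking back down to r15.
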
Signed-off-by: Andrew Cooper <andrew.cooper3@citrix.com>
Reviewed-by: Jan Beulich <jbeulich@suse.com>
Reviewed-by: Wei Liu <wei.liu2@citrix.com>
--- a/xen/include/asm-x86/x86_64/asm_defns.h
+++ b/xen/include/asm-x86/x86_64/asm_defns.h
@@ -77,7 +77,6 @@
* @compat: R8-R15 don't need reloading
*/
.macro RESTORE_ALL adj=0 compat=0
- LOAD_C_CLOBBERED \compat
.if !\compat
movq UREGS_r15(%rsp),%r15
movq UREGS_r14(%rsp),%r14
@@ -86,6 +85,7 @@
.endif
movq UREGS_rbp(%rsp),%rbp
movq UREGS_rbx(%rsp),%rbx
+ LOAD_C_CLOBBERED \compat
subq $-(UREGS_error_code-UREGS_r15+\adj), %rsp
.endm