Diffstat (limited to 'system/xen/xsa/xsa260-2.patch')
 -rw-r--r--  system/xen/xsa/xsa260-2.patch | 110
 1 file changed, 110 insertions(+), 0 deletions(-)
diff --git a/system/xen/xsa/xsa260-2.patch b/system/xen/xsa/xsa260-2.patch
new file mode 100644
index 0000000000..be71b2438f
--- /dev/null
+++ b/system/xen/xsa/xsa260-2.patch
@@ -0,0 +1,110 @@
+From: Andrew Cooper <andrew.cooper3@citrix.com>
+Subject: x86/pv: Move exception injection into {,compat_}test_all_events()
+
+This allows paths to jump straight to {,compat_}test_all_events() and have
+injection of pending exceptions happen automatically, rather than requiring
+all calling paths to handle exceptions themselves.
+
+The normal exception path is simplified as a result, and
+compat_post_handle_exception() is removed entirely.
+
+This is part of XSA-260 / CVE-2018-8897.
+
+Signed-off-by: Andrew Cooper <andrew.cooper3@citrix.com>
+Reviewed-by: Jan Beulich <jbeulich@suse.com>
+
+--- a/xen/arch/x86/x86_64/compat/entry.S
++++ b/xen/arch/x86/x86_64/compat/entry.S
+@@ -39,6 +39,12 @@ ENTRY(compat_test_all_events)
+ leaq irq_stat+IRQSTAT_softirq_pending(%rip),%rcx
+ cmpl $0,(%rcx,%rax,1)
+ jne compat_process_softirqs
++
++ /* Inject exception if pending. */
++ lea VCPU_trap_bounce(%rbx), %rdx
++ testb $TBF_EXCEPTION, TRAPBOUNCE_flags(%rdx)
++ jnz .Lcompat_process_trapbounce
++
+ testb $1,VCPU_mce_pending(%rbx)
+ jnz compat_process_mce
+ .Lcompat_test_guest_nmi:
+@@ -68,6 +74,15 @@ compat_process_softirqs:
+ call do_softirq
+ jmp compat_test_all_events
+
++ ALIGN
++/* %rbx: struct vcpu, %rdx: struct trap_bounce */
++.Lcompat_process_trapbounce:
++ sti
++.Lcompat_bounce_exception:
++ call compat_create_bounce_frame
++ movb $0, TRAPBOUNCE_flags(%rdx)
++ jmp compat_test_all_events
++
+ ALIGN
+ /* %rbx: struct vcpu */
+ compat_process_mce:
+@@ -189,15 +204,6 @@ ENTRY(cr4_pv32_restore)
+ xor %eax, %eax
+ ret
+
+-/* %rdx: trap_bounce, %rbx: struct vcpu */
+-ENTRY(compat_post_handle_exception)
+- testb $TBF_EXCEPTION,TRAPBOUNCE_flags(%rdx)
+- jz compat_test_all_events
+-.Lcompat_bounce_exception:
+- call compat_create_bounce_frame
+- movb $0,TRAPBOUNCE_flags(%rdx)
+- jmp compat_test_all_events
+-
+ .section .text.entry, "ax", @progbits
+
+ /* See lstar_enter for entry register state. */
+--- a/xen/arch/x86/x86_64/entry.S
++++ b/xen/arch/x86/x86_64/entry.S
+@@ -42,6 +42,12 @@ test_all_events:
+ leaq irq_stat+IRQSTAT_softirq_pending(%rip), %rcx
+ cmpl $0, (%rcx, %rax, 1)
+ jne process_softirqs
++
++ /* Inject exception if pending. */
++ lea VCPU_trap_bounce(%rbx), %rdx
++ testb $TBF_EXCEPTION, TRAPBOUNCE_flags(%rdx)
++ jnz .Lprocess_trapbounce
++
+ cmpb $0, VCPU_mce_pending(%rbx)
+ jne process_mce
+ .Ltest_guest_nmi:
+@@ -70,6 +76,15 @@ process_softirqs:
+ jmp test_all_events
+
+ ALIGN
++/* %rbx: struct vcpu, %rdx: struct trap_bounce */
++.Lprocess_trapbounce:
++ sti
++.Lbounce_exception:
++ call create_bounce_frame
++ movb $0, TRAPBOUNCE_flags(%rdx)
++ jmp test_all_events
++
++ ALIGN
+ /* %rbx: struct vcpu */
+ process_mce:
+ testb $1 << VCPU_TRAP_MCE, VCPU_async_exception_mask(%rbx)
+@@ -667,15 +682,9 @@ handle_exception_saved:
+ mov %r15, STACK_CPUINFO_FIELD(xen_cr3)(%r14)
+ testb $3,UREGS_cs(%rsp)
+ jz restore_all_xen
+- leaq VCPU_trap_bounce(%rbx),%rdx
+ movq VCPU_domain(%rbx),%rax
+ testb $1,DOMAIN_is_32bit_pv(%rax)
+- jnz compat_post_handle_exception
+- testb $TBF_EXCEPTION,TRAPBOUNCE_flags(%rdx)
+- jz test_all_events
+-.Lbounce_exception:
+- call create_bounce_frame
+- movb $0,TRAPBOUNCE_flags(%rdx)
++ jnz compat_test_all_events
+ jmp test_all_events
+
+ /* No special register assumptions. */
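
For readers following the control-flow change rather than the assembly itself, here is a minimal, hypothetical C sketch of the event-check loop this patch produces. Every name in it (softirq_pending(), create_bounce_frame() as a C function, the struct vcpu fields) is a simplified stand-in for illustration, not Xen's actual C interface; the authoritative change is the assembly above.

/*
 * Sketch of {,compat_}test_all_events() after the patch: the loop itself
 * now tests TBF_EXCEPTION and bounces any pending exception, so callers
 * can simply jump here instead of handling injection themselves.
 */
#include <stdbool.h>
#include <stdio.h>

#define TBF_EXCEPTION 1

struct trap_bounce { unsigned char flags; };
struct vcpu {
    struct trap_bounce trap_bounce;
    bool mce_pending, nmi_pending;
};

static bool softirq_pending(void) { return false; }   /* stand-in */

static void create_bounce_frame(struct vcpu *v)       /* stand-in */
{
    (void)v;
    printf("bouncing exception into the guest\n");
}

static void test_all_events(struct vcpu *v)
{
    for (;;) {
        if (softirq_pending())
            continue;                 /* process_softirqs, then retry */
        if (v->trap_bounce.flags & TBF_EXCEPTION) {
            create_bounce_frame(v);   /* .Lprocess_trapbounce */
            v->trap_bounce.flags = 0;
            continue;                 /* jmp test_all_events */
        }
        /* MCE and NMI checks follow here in the real code ... */
        return;                       /* restore_all_guest */
    }
}

int main(void)
{
    struct vcpu v = { .trap_bounce.flags = TBF_EXCEPTION };
    test_all_events(&v);              /* pending exception injected once */
    return 0;
}

This is why handle_exception_saved shrinks in the second hunk of entry.S: it no longer needs its own TBF_EXCEPTION test or the compat_post_handle_exception helper, and instead just selects compat_test_all_events or test_all_events and lets the bounce happen there.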