Commit 42fc6c6c authored by Josh Poimboeuf's avatar Josh Poimboeuf Committed by Ingo Molnar
Browse files

x86/asm: Don't use RBP as a temporary register in csum_partial_copy_generic()

Andrey Konovalov reported the following warning while fuzzing the kernel
with syzkaller:

  WARNING: kernel stack regs at ffff8800686869f8 in a.out:4933 has bad 'bp' value c3fc855a10167ec0

The unwinder dump revealed that RBP had a bad value when an interrupt
occurred in csum_partial_copy_generic().

That function saves RBP on the stack and then overwrites it, using it as
a scratch register.  That's problematic because it breaks stack traces
if an interrupt occurs in the middle of the function.

Replace the usage of RBP with another callee-saved register (R15) so
stack traces are no longer affected.
Reported-by: Andrey Konovalov
Tested-by: Andrey Konovalov
Signed-off-by: Josh Poimboeuf
Cc: Cong Wang
Cc: David S. Miller
Cc: Dmitry Vyukov
Cc: Eric Dumazet
Cc: Kostya Serebryany
Cc: Linus Torvalds
Cc: Marcelo Ricardo Leitner
Cc: Neil Horman
Cc: Peter Zijlstra
Cc: Thomas Gleixner
Cc: Vlad Yasevich
Cc: netdev
Cc: syzkaller

Signed-off-by: Ingo Molnar
parent bfb8c6e4
@@ -55,7 +55,7 @@ ENTRY(csum_partial_copy_generic)
movq %r12, 3*8(%rsp)
movq %r14, 4*8(%rsp)
movq %r13, 5*8(%rsp)
movq %rbp, 6*8(%rsp)
movq %r15, 6*8(%rsp)
movq %r8, (%rsp)
movq %r9, 1*8(%rsp)
@@ -74,7 +74,7 @@ ENTRY(csum_partial_copy_generic)
/* main loop. clear in 64 byte blocks */
/* r9: zero, r8: temp2, rbx: temp1, rax: sum, rcx: saved length */
/* r11: temp3, rdx: temp4, r12 loopcnt */
/* r10: temp5, rbp: temp6, r14 temp7, r13 temp8 */
/* r10: temp5, r15: temp6, r14 temp7, r13 temp8 */
.p2align 4
@@ -89,7 +89,7 @@ ENTRY(csum_partial_copy_generic)
movq 32(%rdi), %r10
movq 40(%rdi), %rbp
movq 40(%rdi), %r15
movq 48(%rdi), %r14
@@ -103,7 +103,7 @@ ENTRY(csum_partial_copy_generic)
adcq %r11, %rax
adcq %rdx, %rax
adcq %r10, %rax
adcq %rbp, %rax
adcq %r15, %rax
adcq %r14, %rax
adcq %r13, %rax
@@ -121,7 +121,7 @@ ENTRY(csum_partial_copy_generic)
movq %r10, 32(%rsi)
movq %rbp, 40(%rsi)
movq %r15, 40(%rsi)
movq %r14, 48(%rsi)
@@ -203,7 +203,7 @@ ENTRY(csum_partial_copy_generic)
movq 3*8(%rsp), %r12
movq 4*8(%rsp), %r14
movq 5*8(%rsp), %r13
movq 6*8(%rsp), %rbp
movq 6*8(%rsp), %r15
addq $7*8, %rsp
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment