author     Al Viro <viro@zeniv.linux.org.uk>        2020-07-20 10:09:24 -0400
committer  Al Viro <viro@zeniv.linux.org.uk>        2020-08-20 15:45:22 -0400
commit     70d65cd555c5e43c613700f604a47f7ebcf7b6f1 (patch)
tree       5c0494b0c4bec1c753e158cf7b25e9f1ead37088 /arch/powerpc/include/asm/checksum.h
parent     daf52375c19feb4397cfd883302a7c907de2d6ad (diff)
ppc: propagate the calling conventions change down to csum_partial_copy_generic()
... and get rid of the pointless fallback in the wrappers. On error the
wrappers used to zero the unwritten area and calculate the csum of the
entire thing. Not wanting to do that in the assembler part had been
perfectly reasonable; doing it at all, OTOH, was not: in case of an error
the caller discards the data we'd copied, along with whatever checksum it
might've had.
Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>
Diffstat (limited to 'arch/powerpc/include/asm/checksum.h')
-rw-r--r--   arch/powerpc/include/asm/checksum.h   6
1 file changed, 2 insertions(+), 4 deletions(-)
diff --git a/arch/powerpc/include/asm/checksum.h b/arch/powerpc/include/asm/checksum.h
index dba685d984c0..82f099ba2411 100644
--- a/arch/powerpc/include/asm/checksum.h
+++ b/arch/powerpc/include/asm/checksum.h
@@ -18,9 +18,7 @@
  * Like csum_partial, this must be called with even lengths,
  * except for the last fragment.
  */
-extern __wsum csum_partial_copy_generic(const void *src, void *dst,
-					int len, __wsum sum,
-					int *src_err, int *dst_err);
+extern __wsum csum_partial_copy_generic(const void *src, void *dst, int len);
 
 #define _HAVE_ARCH_COPY_AND_CSUM_FROM_USER
 extern __wsum csum_and_copy_from_user(const void __user *src, void *dst,
@@ -31,7 +29,7 @@ extern __wsum csum_and_copy_to_user(const void *src, void __user *dst,
 
 #define _HAVE_ARCH_CSUM_AND_COPY
 #define csum_partial_copy_nocheck(src, dst, len)	\
-	csum_partial_copy_generic((src), (dst), (len), 0, NULL, NULL)
+	csum_partial_copy_generic((src), (dst), (len))
 
 /*
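For context, a minimal sketch of what the removed fallback looked like on the
wrapper side, contrasted with the new convention. This is not the actual
wrapper code touched by this commit: the sketch_csum_and_copy_from_user()
helpers are hypothetical, the old-convention error handling is reconstructed
from the commit message, and the access checks and user-access enable/disable
steps that real wrappers perform are omitted.

	#include <linux/compiler.h>
	#include <linux/string.h>
	#include <linux/types.h>

	#ifdef SKETCH_OLD_CONVENTION
	/* Old convention: the asm helper reported faults via err pointers. */
	extern __wsum csum_partial_copy_generic(const void *src, void *dst,
						int len, __wsum sum,
						int *src_err, int *dst_err);
	extern __wsum csum_partial(const void *buff, int len, __wsum sum);

	static __wsum sketch_csum_and_copy_from_user(const void __user *src,
						     void *dst, int len,
						     __wsum sum, int *err_ptr)
	{
		sum = csum_partial_copy_generic((__force const void *)src, dst,
						len, sum, err_ptr, NULL);
		if (unlikely(*err_ptr)) {
			/*
			 * The fallback this commit removes: zero the data that
			 * was not copied and checksum the whole buffer anyway
			 * (simplified here to zeroing everything), even though
			 * the caller discards both the data and the checksum
			 * on error.
			 */
			memset(dst, 0, len);
			sum = csum_partial(dst, len, sum);
		}
		return sum;
	}
	#else
	/* New convention: just (src, dst, len); no error plumbing, no fallback. */
	extern __wsum csum_partial_copy_generic(const void *src, void *dst, int len);

	static __wsum sketch_csum_and_copy_from_user(const void __user *src,
						     void *dst, int len)
	{
		return csum_partial_copy_generic((__force const void *)src, dst, len);
	}
	#endif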