sh: propagate the calling conventions change down to csum_partial_copy_generic()

... and get rid of zeroing destination on error there.

Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>
Al Viro committed Aug 20, 2020
1 parent 66aa388 commit dc16c8a
Showing 2 changed files with 39 additions and 100 deletions.
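Editorial note: the calling-convention change referenced in the subject makes csum_partial_copy_generic() take only (src, dst, len), seed the running sum with ~0U internally, and report a fault by returning 0 instead of storing -EFAULT through src_err_ptr/dst_err_ptr and zeroing the destination. Below is a minimal user-space sketch (not kernel code; csum32_add() is a stand-in for the addc chain in the assembly) of why a ~0U seed frees up 0 as the error value.

#include <assert.h>
#include <stdint.h>

/* End-around-carry (ones' complement) 32-bit add, as the addc loop does. */
static uint32_t csum32_add(uint32_t sum, uint32_t val)
{
	uint64_t t = (uint64_t)sum + val;

	return (uint32_t)(t + (t >> 32));	/* fold the carry back in */
}

int main(void)
{
	uint32_t sum = ~0U;	/* the seed the helper now applies itself */
	uint32_t words[] = { 0, 0x1234, ~0U, 0xdeadbeef };

	for (unsigned int i = 0; i < sizeof(words) / sizeof(words[0]); i++) {
		sum = csum32_add(sum, words[i]);
		assert(sum != 0);	/* a successful sum never lands on 0 */
	}
	return 0;
}

Starting from a non-zero sum, an end-around-carry add can never produce 0, so a 0 return can be reserved for the fault path.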
20 changes: 4 additions & 16 deletions arch/sh/include/asm/checksum_32.h
@@ -30,9 +30,7 @@ asmlinkage __wsum csum_partial(const void *buff, int len, __wsum sum);
* better 64-bit) boundary
*/

-asmlinkage __wsum csum_partial_copy_generic(const void *src, void *dst,
-int len, __wsum sum,
-int *src_err_ptr, int *dst_err_ptr);
+asmlinkage __wsum csum_partial_copy_generic(const void *src, void *dst, int len);

#define _HAVE_ARCH_CSUM_AND_COPY
/*
@@ -45,21 +43,16 @@ asmlinkage __wsum csum_partial_copy_generic(const void *src, void *dst,
static inline
__wsum csum_partial_copy_nocheck(const void *src, void *dst, int len)
{
-return csum_partial_copy_generic(src, dst, len, 0, NULL, NULL);
+return csum_partial_copy_generic(src, dst, len);
}

#define _HAVE_ARCH_COPY_AND_CSUM_FROM_USER
static inline
__wsum csum_and_copy_from_user(const void __user *src, void *dst, int len)
{
-int err = 0;
-__wsum sum = ~0U;
-
if (!access_ok(src, len))
return 0;
-sum = csum_partial_copy_generic((__force const void *)src, dst,
-len, sum, &err, NULL);
-return err ? 0 : sum;
+return csum_partial_copy_generic((__force const void *)src, dst, len);
}

/*
@@ -202,13 +195,8 @@ static inline __wsum csum_and_copy_to_user(const void *src,
void __user *dst,
int len)
{
-int err = 0;
-__wsum sum = ~0U;
-
if (!access_ok(dst, len))
return 0;
-sum = csum_partial_copy_generic((__force const void *)src,
-dst, len, sum, NULL, &err);
-return err ? 0 : sum;
+return csum_partial_copy_generic((__force const void *)src, dst, len);
}
#endif /* __ASM_SH_CHECKSUM_H */
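Editorial note: both inline wrappers now simply forward to the three-argument helper; access_ok() is still checked here, and a fault during the copy (or a failed access_ok()) shows up as a 0 return. A hypothetical caller written against this convention might look like the sketch below; the function name and error handling are made up for illustration, only csum_and_copy_from_user() itself comes from the header above.

#include <linux/errno.h>
#include <linux/uaccess.h>
#include <net/checksum.h>

/* Illustrative only: copy 'len' bytes from user space while checksumming. */
static int example_copy_and_csum(const void __user *from, void *to, int len,
				 __wsum *csum)
{
	__wsum sum = csum_and_copy_from_user(from, to, len);

	if (!sum)		/* 0 now means the copy faulted */
		return -EFAULT;

	*csum = sum;		/* partial checksum of the copied data */
	return 0;
}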
119 changes: 35 additions & 84 deletions arch/sh/lib/checksum.S
@@ -173,47 +173,27 @@ ENTRY(csum_partial)
mov r6, r0

/*
-unsigned int csum_partial_copy_generic (const char *src, char *dst, int len,
-int sum, int *src_err_ptr, int *dst_err_ptr)
+unsigned int csum_partial_copy_generic (const char *src, char *dst, int len)
*/

/*
-* Copy from ds while checksumming, otherwise like csum_partial
-*
-* The macros SRC and DST specify the type of access for the instruction.
-* thus we can call a custom exception handler for all access types.
-*
-* FIXME: could someone double-check whether I haven't mixed up some SRC and
-* DST definitions? It's damn hard to trigger all cases. I hope I got
-* them all but there's no guarantee.
+* Copy from ds while checksumming, otherwise like csum_partial with initial
+* sum being ~0U
*/

-#define SRC(...) \
+#define EXC(...) \
9999: __VA_ARGS__ ; \
.section __ex_table, "a"; \
.long 9999b, 6001f ; \
.previous

-#define DST(...) \
-9999: __VA_ARGS__ ; \
-.section __ex_table, "a"; \
-.long 9999b, 6002f ; \
-.previous
-
!
! r4: const char *SRC
! r5: char *DST
! r6: int LEN
-! r7: int SUM
!
-! on stack:
-! int *SRC_ERR_PTR
-! int *DST_ERR_PTR
-!
ENTRY(csum_partial_copy_generic)
-mov.l r5,@-r15
-mov.l r6,@-r15
-
+mov #-1,r7
mov #3,r0 ! Check src and dest are equally aligned
mov r4,r1
and r0,r1
@@ -243,11 +223,11 @@ ENTRY(csum_partial_copy_generic)
clrt
.align 2
5:
-SRC( mov.b @r4+,r1 )
-SRC( mov.b @r4+,r0 )
+EXC( mov.b @r4+,r1 )
+EXC( mov.b @r4+,r0 )
extu.b r1,r1
-DST( mov.b r1,@r5 )
-DST( mov.b r0,@(1,r5) )
+EXC( mov.b r1,@r5 )
+EXC( mov.b r0,@(1,r5) )
extu.b r0,r0
add #2,r5

@@ -276,8 +256,8 @@ DST( mov.b r0,@(1,r5) )
! Handle first two bytes as a special case
.align 2
1:
-SRC( mov.w @r4+,r0 )
-DST( mov.w r0,@r5 )
+EXC( mov.w @r4+,r0 )
+EXC( mov.w r0,@r5 )
add #2,r5
extu.w r0,r0
addc r0,r7
@@ -292,32 +272,32 @@ DST( mov.w r0,@r5 )
clrt
.align 2
1:
-SRC( mov.l @r4+,r0 )
-SRC( mov.l @r4+,r1 )
+EXC( mov.l @r4+,r0 )
+EXC( mov.l @r4+,r1 )
addc r0,r7
-DST( mov.l r0,@r5 )
-DST( mov.l r1,@(4,r5) )
+EXC( mov.l r0,@r5 )
+EXC( mov.l r1,@(4,r5) )
addc r1,r7

-SRC( mov.l @r4+,r0 )
-SRC( mov.l @r4+,r1 )
+EXC( mov.l @r4+,r0 )
+EXC( mov.l @r4+,r1 )
addc r0,r7
-DST( mov.l r0,@(8,r5) )
-DST( mov.l r1,@(12,r5) )
+EXC( mov.l r0,@(8,r5) )
+EXC( mov.l r1,@(12,r5) )
addc r1,r7

-SRC( mov.l @r4+,r0 )
-SRC( mov.l @r4+,r1 )
+EXC( mov.l @r4+,r0 )
+EXC( mov.l @r4+,r1 )
addc r0,r7
-DST( mov.l r0,@(16,r5) )
-DST( mov.l r1,@(20,r5) )
+EXC( mov.l r0,@(16,r5) )
+EXC( mov.l r1,@(20,r5) )
addc r1,r7

-SRC( mov.l @r4+,r0 )
-SRC( mov.l @r4+,r1 )
+EXC( mov.l @r4+,r0 )
+EXC( mov.l @r4+,r1 )
addc r0,r7
-DST( mov.l r0,@(24,r5) )
-DST( mov.l r1,@(28,r5) )
+EXC( mov.l r0,@(24,r5) )
+EXC( mov.l r1,@(28,r5) )
addc r1,r7
add #32,r5
movt r0
@@ -335,9 +315,9 @@ DST( mov.l r1,@(28,r5) )
clrt
shlr2 r6
3:
-SRC( mov.l @r4+,r0 )
+EXC( mov.l @r4+,r0 )
addc r0,r7
-DST( mov.l r0,@r5 )
+EXC( mov.l r0,@r5 )
add #4,r5
movt r0
dt r6
@@ -353,8 +333,8 @@ DST( mov.l r0,@r5 )
mov #2,r1
cmp/hs r1,r6
bf 5f
-SRC( mov.w @r4+,r0 )
-DST( mov.w r0,@r5 )
+EXC( mov.w @r4+,r0 )
+EXC( mov.w r0,@r5 )
extu.w r0,r0
add #2,r5
cmp/eq r1,r6
@@ -363,8 +343,8 @@ DST( mov.w r0,@r5 )
shll16 r0
addc r0,r7
5:
-SRC( mov.b @r4+,r0 )
-DST( mov.b r0,@r5 )
+EXC( mov.b @r4+,r0 )
+EXC( mov.b r0,@r5 )
extu.b r0,r0
#ifndef __LITTLE_ENDIAN__
shll8 r0
@@ -373,42 +353,13 @@ DST( mov.b r0,@r5 )
mov #0,r0
addc r0,r7
7:
-5000:

# Exception handler:
.section .fixup, "ax"

6001:
-mov.l @(8,r15),r0 ! src_err_ptr
-mov #-EFAULT,r1
-mov.l r1,@r0
-
-! zero the complete destination - computing the rest
-! is too much work
-mov.l @(4,r15),r5 ! dst
-mov.l @r15,r6 ! len
-mov #0,r7
-1: mov.b r7,@r5
-dt r6
-bf/s 1b
-add #1,r5
-mov.l 8000f,r0
-jmp @r0
-nop
-.align 2
-8000: .long 5000b
-
-6002:
-mov.l @(12,r15),r0 ! dst_err_ptr
-mov #-EFAULT,r1
-mov.l r1,@r0
-mov.l 8001f,r0
-jmp @r0
-nop
-.align 2
-8001: .long 5000b
-
+rts
+mov #0,r0
.previous
-add #8,r15
rts
mov r7,r0

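Editorial note: with only one fixup target left, the SRC()/DST() pair collapses into a single EXC() macro whose __ex_table entry pairs the address of each guarded load/store with label 6001, and 6001 now just returns 0 rather than storing -EFAULT and wiping the destination. Below is a rough C model of what those (insn, fixup) pairs do at fault time; the struct and function names are simplified stand-ins, not the kernel's actual extable types.

#include <stddef.h>

/* Simplified model of one __ex_table entry emitted by EXC(). */
struct model_exentry {
	unsigned long insn;	/* address of the guarded mov.b/mov.w/mov.l */
	unsigned long fixup;	/* where to resume, i.e. label 6001 */
};

/*
 * Model of the fault-time lookup: if the faulting PC has an entry,
 * execution resumes at the fixup (which returns 0 under the new
 * convention); otherwise it is treated as a genuine kernel fault.
 */
static unsigned long model_search_extable(const struct model_exentry *tbl,
					  size_t n, unsigned long fault_pc)
{
	size_t i;

	for (i = 0; i < n; i++)	/* the real table is sorted and bisected */
		if (tbl[i].insn == fault_pc)
			return tbl[i].fixup;
	return 0;	/* no fixup found */
}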