2 * linux/arch/arm/lib/csumpartial.S
4 * Copyright (C) 1995-1998 Russell King
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License version 2 as
8 * published by the Free Software Foundation.
10 #include <linux/linkage.h>
11 #include <asm/assembler.h>
16 * Function: __u32 csum_partial(const char *src, int len, __u32 sum)
17 * Params : r0 = buffer, r1 = len, r2 = checksum
18 * Returns : r0 = new checksum
25 td1 .req r4 @ save before use
26 td2 .req r5 @ save before use
34 * Handle 0 to 7 bytes, with any alignment of source and
35 * destination pointers. Note that when we get here, C = 0
37 .less8: teq len, #0 @ check for zero count
40 /* we must have at least one byte. */
41 tst buf, #1 @ odd address?
44 adcnes sum, sum, td0, lsl #8
49 /* we are now half-word aligned */
52 #if __LINUX_ARM_ARCH__ >= 4
60 orr td0, td0, td3, lsl #8
66 .less8_byte: tst len, #1 @ odd number of bytes
67 ldrneb td0, [buf], #1 @ include last byte
68 adcnes sum, sum, td0 @ update checksum
70 .done: adc r0, sum, #0 @ collect up the last carry
72 tst td0, #1 @ check buffer alignment
73 movne td0, r0, lsl #8 @ rotate checksum by 8 bits
74 orrne r0, td0, r0, lsr #24
75 ldr pc, [sp], #4 @ return
77 .not_aligned: tst buf, #1 @ odd address
78 ldrneb td0, [buf], #1 @ make even
80 adcnes sum, sum, td0, lsl #8 @ update checksum
82 tst buf, #2 @ 32-bit aligned?
83 #if __LINUX_ARM_ARCH__ >= 4
84 ldrneh td0, [buf] @ make 32-bit aligned
91 orrne td0, td0, ip, lsl #8
93 adcnes sum, sum, td0 @ update checksum
98 cmp len, #8 @ Ensure that we have at least
99 blo .less8 @ 8 bytes to copy.
101 adds sum, sum, #0 @ C = 0
102 tst buf, #3 @ Test destination alignment
103 blne .not_aligned @ align destination, return here
109 2: ldmia buf!, {td0, td1, td2, td3}
114 ldmia buf!, {td0, td1, td2, td3}
124 3: tst len, #0x1c @ should not change C
127 4: ldr td0, [buf], #4