X-Git-Url: http://git.rot13.org/?a=blobdiff_plain;f=include%2Fasm-cris%2Fbitops.h;h=d3eb0f1e42085c7911aaa6f5d4303d38a78cd022;hb=52347f4e810ba323d02cd2c26b5d738f4a2c3d5e;hp=e3da57f97964fac5d2141ed8684c6cd9ed45a63f;hpb=6c89cce75c6f93088a5a2a25bb9674a9194592cc;p=powerpc.git

diff --git a/include/asm-cris/bitops.h b/include/asm-cris/bitops.h
index e3da57f979..d3eb0f1e42 100644
--- a/include/asm-cris/bitops.h
+++ b/include/asm-cris/bitops.h
@@ -89,7 +89,7 @@ struct __dummy { unsigned long a[100]; };
  * It also implies a memory barrier.
  */
 
-extern inline int test_and_set_bit(int nr, volatile unsigned long *addr)
+static inline int test_and_set_bit(int nr, volatile unsigned long *addr)
 {
 	unsigned int mask, retval;
 	unsigned long flags;
@@ -105,7 +105,7 @@ extern inline int test_and_set_bit(int nr, volatile unsigned long *addr)
 	return retval;
 }
 
-extern inline int __test_and_set_bit(int nr, volatile unsigned long *addr)
+static inline int __test_and_set_bit(int nr, volatile unsigned long *addr)
 {
 	unsigned int mask, retval;
 	unsigned int *adr = (unsigned int *)addr;
@@ -132,7 +132,7 @@ extern inline int __test_and_set_bit(int nr, volatile unsigned long *addr)
  * It also implies a memory barrier.
  */
 
-extern inline int test_and_clear_bit(int nr, volatile unsigned long *addr)
+static inline int test_and_clear_bit(int nr, volatile unsigned long *addr)
 {
 	unsigned int mask, retval;
 	unsigned long flags;
@@ -157,7 +157,7 @@ extern inline int test_and_clear_bit(int nr, volatile unsigned long *addr)
  * but actually fail. You must protect multiple accesses with a lock.
  */
 
-extern inline int __test_and_clear_bit(int nr, volatile unsigned long *addr)
+static inline int __test_and_clear_bit(int nr, volatile unsigned long *addr)
 {
 	unsigned int mask, retval;
 	unsigned int *adr = (unsigned int *)addr;
@@ -177,7 +177,7 @@ extern inline int __test_and_clear_bit(int nr, volatile unsigned long *addr)
  * It also implies a memory barrier.
  */
 
-extern inline int test_and_change_bit(int nr, volatile unsigned long *addr)
+static inline int test_and_change_bit(int nr, volatile unsigned long *addr)
 {
 	unsigned int mask, retval;
 	unsigned long flags;
@@ -193,7 +193,7 @@ extern inline int test_and_change_bit(int nr, volatile unsigned long *addr)
 
 /* WARNING: non atomic and it can be reordered! */
 
-extern inline int __test_and_change_bit(int nr, volatile unsigned long *addr)
+static inline int __test_and_change_bit(int nr, volatile unsigned long *addr)
 {
 	unsigned int mask, retval;
 	unsigned int *adr = (unsigned int *)addr;
@@ -214,7 +214,7 @@ extern inline int __test_and_change_bit(int nr, volatile unsigned long *addr)
  * This routine doesn't need to be atomic.
  */
 
-extern inline int test_bit(int nr, const volatile unsigned long *addr)
+static inline int test_bit(int nr, const volatile unsigned long *addr)
 {
 	unsigned int mask;
 	unsigned int *adr = (unsigned int *)addr;
@@ -240,6 +240,7 @@ extern inline int test_bit(int nr, const volatile unsigned long *addr)
  */
 
 #define fls(x) generic_fls(x)
+#define fls64(x) generic_fls64(x)
 
 /*
  * hweightN - returns the hamming weight of a N-bit word
@@ -258,7 +259,7 @@ extern inline int test_bit(int nr, const volatile unsigned long *addr)
  * @offset: The bitnumber to start searching at
  * @size: The maximum size to search
  */
-extern inline int find_next_zero_bit (const unsigned long * addr, int size, int offset)
+static inline int find_next_zero_bit (const unsigned long * addr, int size, int offset)
 {
 	unsigned long *p = ((unsigned long *) addr) + (offset >> 5);
 	unsigned long result = offset & ~31UL;
@@ -366,7 +367,7 @@ found_middle:
 #define minix_test_bit(nr,addr) test_bit(nr,addr)
 #define minix_find_first_zero_bit(addr,size) find_first_zero_bit(addr,size)
 
-extern inline int sched_find_first_bit(const unsigned long *b)
+static inline int sched_find_first_bit(const unsigned long *b)
 {
 	if (unlikely(b[0]))
 		return __ffs(b[0]);
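
Background note, not part of the patch: under gcc's traditional GNU89 semantics, "extern inline" provides an inline-only definition with no out-of-line copy emitted, so any call the compiler declines to inline is left as an unresolved external reference, and the keyword changes meaning again under C99. "static inline" is unambiguous: gcc may always fall back to a file-local copy. A minimal stand-alone sketch of that difference, using hypothetical function names and assuming gcc with -std=gnu89 at -O0, is:

/* sketch.c - illustration only, not part of bitops.h (hypothetical names)
 *
 * Build: gcc -std=gnu89 -O0 sketch.c
 *
 * With GNU89 inline semantics, the extern inline function below emits no
 * out-of-line definition; at -O0 nothing is inlined, so the call to it is
 * left as an undefined reference at link time.  The static inline function
 * always gets a file-local fallback copy, so its call links fine.
 */

extern inline int set_lowest_bit(int x)		/* GNU89: inline-only definition */
{
	return x | 1;
}

static inline int clear_lowest_bit(int x)	/* always has a local fallback */
{
	return x & ~1;
}

int main(void)
{
	/* In this configuration the call to set_lowest_bit() is expected to
	 * fail at link time ("undefined reference"); clear_lowest_bit()
	 * resolves to the local copy gcc emits for the static inline. */
	return set_lowest_bit(4) + clear_lowest_bit(5);
}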