author		Jim Quinlan <jim2101024@gmail.com>	2012-09-06 15:36:54 (GMT)
committer	Ralf Baechle <ralf@linux-mips.org>	2012-11-09 09:58:50 (GMT)
commit		9de79c500600c5868e83712a2ea5b0b48f83af24 (patch)
tree		8e8de0d640f65157415b155e9821369d92745d71 /arch
parent		34d875d7b579baab2f2f6dfd8e8533602d9dbf33 (diff)
download	linux-9de79c500600c5868e83712a2ea5b0b48f83af24.tar.xz
MIPS: bitops.h: Change use of 'unsigned short' to 'int'
[ralf@linux-mips.org: No functional change, but it is consistent with how types are used elsewhere in the code.]

Signed-off-by: Jim Quinlan <jim2101024@gmail.com>
Cc: linux-mips@linux-mips.org
Cc: David Daney <ddaney.cavm@gmail.com>
Cc: Kevin Cernekee <cernekee@gmail.com>
Cc: Jim Quinlan <jim2101024@gmail.com>
Patchwork: https://patchwork.linux-mips.org/patch/4319/
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
Diffstat (limited to 'arch')
-rw-r--r--	arch/mips/include/asm/bitops.h	14
1 file changed, 7 insertions(+), 7 deletions(-)
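The change is a pure type cleanup: nr & SZLONG_MASK extracts the bit offset within a single long, a value in the range 0..31 (32-bit) or 0..63 (64-bit), so a plain int holds it comfortably and matches the integer types used elsewhere in the file. Below is a minimal, standalone sketch of that indexing arithmetic; the SZLONG_LOG/SZLONG_MASK values are hard-coded for the 64-bit case and this is illustrative only, not the kernel header.

/* Illustrative sketch only (not arch/mips/include/asm/bitops.h):
 * shows how a bit number is split into a word index and a bit offset.
 * SZLONG_LOG/SZLONG_MASK below assume a 64-bit long; on a 32-bit
 * kernel they would be 5 and 31. */
#include <stdio.h>

#define SZLONG_LOG  6
#define SZLONG_MASK 63UL

int main(void)
{
	unsigned long nr = 200;                  /* arbitrary bit number */
	unsigned long word = nr >> SZLONG_LOG;   /* which long holds the bit: 3 */
	int bit = nr & SZLONG_MASK;              /* offset inside that long: 8 */

	printf("bit %lu -> word %lu, offset %d\n", nr, word, bit);
	return 0;
}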
diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h
index 82ad35c..455664c 100644
--- a/arch/mips/include/asm/bitops.h
+++ b/arch/mips/include/asm/bitops.h
@@ -57,7 +57,7 @@
 static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 {
 	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 	unsigned long temp;
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
@@ -118,7 +118,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 {
 	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 	unsigned long temp;
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
@@ -191,7 +191,7 @@ static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *ad
  */
 static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 {
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
@@ -244,7 +244,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 static inline int test_and_set_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 	unsigned long res;
 
 	smp_mb__before_llsc();
@@ -310,7 +310,7 @@ static inline int test_and_set_bit(unsigned long nr,
 static inline int test_and_set_bit_lock(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 	unsigned long res;
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
@@ -373,7 +373,7 @@ static inline int test_and_set_bit_lock(unsigned long nr,
 static inline int test_and_clear_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 	unsigned long res;
 
 	smp_mb__before_llsc();
@@ -457,7 +457,7 @@ static inline int test_and_clear_bit(unsigned long nr,
 static inline int test_and_change_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 	unsigned long res;
 
 	smp_mb__before_llsc();