Diffstat (limited to 'arch/mips/lib')
-rw-r--r--  arch/mips/lib/bitops.c          14
-rw-r--r--  arch/mips/lib/dump_tlb.c         5
-rw-r--r--  arch/mips/lib/memset.S          84
-rw-r--r--  arch/mips/lib/mips-atomic.c    149
-rw-r--r--  arch/mips/lib/r3k_dump_tlb.c     7
-rw-r--r--  arch/mips/lib/strlen_user.S      9
-rw-r--r--  arch/mips/lib/strncpy_user.S    32
-rw-r--r--  arch/mips/lib/strnlen_user.S     2
8 files changed, 175 insertions, 127 deletions
diff --git a/arch/mips/lib/bitops.c b/arch/mips/lib/bitops.c
index a64daee740ee..3b2a1e78a543 100644
--- a/arch/mips/lib/bitops.c
+++ b/arch/mips/lib/bitops.c
@@ -19,7 +19,7 @@
*/
void __mips_set_bit(unsigned long nr, volatile unsigned long *addr)
{
- volatile unsigned long *a = addr;
+ unsigned long *a = (unsigned long *)addr;
unsigned bit = nr & SZLONG_MASK;
unsigned long mask;
unsigned long flags;
@@ -41,7 +41,7 @@ EXPORT_SYMBOL(__mips_set_bit);
*/
void __mips_clear_bit(unsigned long nr, volatile unsigned long *addr)
{
- volatile unsigned long *a = addr;
+ unsigned long *a = (unsigned long *)addr;
unsigned bit = nr & SZLONG_MASK;
unsigned long mask;
unsigned long flags;
@@ -63,7 +63,7 @@ EXPORT_SYMBOL(__mips_clear_bit);
*/
void __mips_change_bit(unsigned long nr, volatile unsigned long *addr)
{
- volatile unsigned long *a = addr;
+ unsigned long *a = (unsigned long *)addr;
unsigned bit = nr & SZLONG_MASK;
unsigned long mask;
unsigned long flags;
@@ -86,7 +86,7 @@ EXPORT_SYMBOL(__mips_change_bit);
int __mips_test_and_set_bit(unsigned long nr,
volatile unsigned long *addr)
{
- volatile unsigned long *a = addr;
+ unsigned long *a = (unsigned long *)addr;
unsigned bit = nr & SZLONG_MASK;
unsigned long mask;
unsigned long flags;
@@ -112,7 +112,7 @@ EXPORT_SYMBOL(__mips_test_and_set_bit);
int __mips_test_and_set_bit_lock(unsigned long nr,
volatile unsigned long *addr)
{
- volatile unsigned long *a = addr;
+ unsigned long *a = (unsigned long *)addr;
unsigned bit = nr & SZLONG_MASK;
unsigned long mask;
unsigned long flags;
@@ -137,7 +137,7 @@ EXPORT_SYMBOL(__mips_test_and_set_bit_lock);
*/
int __mips_test_and_clear_bit(unsigned long nr, volatile unsigned long *addr)
{
- volatile unsigned long *a = addr;
+ unsigned long *a = (unsigned long *)addr;
unsigned bit = nr & SZLONG_MASK;
unsigned long mask;
unsigned long flags;
@@ -162,7 +162,7 @@ EXPORT_SYMBOL(__mips_test_and_clear_bit);
*/
int __mips_test_and_change_bit(unsigned long nr, volatile unsigned long *addr)
{
- volatile unsigned long *a = addr;
+ unsigned long *a = (unsigned long *)addr;
unsigned bit = nr & SZLONG_MASK;
unsigned long mask;
unsigned long flags;
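[Note] Every hunk above makes the same substitution: the local pointer drops its
volatile qualifier via a cast. These are the out-of-line fallbacks, and each one
presumably brackets its read-modify-write with an irq save/restore pair (hence
the flags variable in every context block), so a volatile access buys nothing
and only constrains the compiler. A minimal C sketch of the pattern, with
assumed stand-ins for SZLONG_LOG/SZLONG_MASK (the real definitions live in
asm/bitops.h):

#include <linux/irqflags.h>

#define SZLONG_LOG	(sizeof(long) == 8 ? 6 : 5)	/* assumed stand-in */
#define SZLONG_MASK	((1UL << SZLONG_LOG) - 1)

static void sketch_set_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *a = (unsigned long *)addr;	/* volatile cast away */
	unsigned long mask = 1UL << (nr & SZLONG_MASK);
	unsigned long flags;

	a += nr >> SZLONG_LOG;			/* word holding the bit */
	raw_local_irq_save(flags);		/* RMW serialized here ... */
	*a |= mask;				/* ... so no volatile needed */
	raw_local_irq_restore(flags);
}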
diff --git a/arch/mips/lib/dump_tlb.c b/arch/mips/lib/dump_tlb.c
index 32b9f21bfd85..8a12d00908e0 100644
--- a/arch/mips/lib/dump_tlb.c
+++ b/arch/mips/lib/dump_tlb.c
@@ -11,6 +11,7 @@
#include <asm/page.h>
#include <asm/pgtable.h>
#include <asm/tlbdebug.h>
+#include <asm/mmu_context.h>
static inline const char *msk2str(unsigned int mask)
{
@@ -55,7 +56,7 @@ static void dump_tlb(int first, int last)
s_pagemask = read_c0_pagemask();
s_entryhi = read_c0_entryhi();
s_index = read_c0_index();
- asid = s_entryhi & 0xff;
+ asid = ASID_MASK(s_entryhi);
for (i = first; i <= last; i++) {
write_c0_index(i);
@@ -85,7 +86,7 @@ static void dump_tlb(int first, int last)
printk("va=%0*lx asid=%02lx\n",
width, (entryhi & ~0x1fffUL),
- entryhi & 0xff);
+ ASID_MASK(entryhi));
printk("\t[pa=%0*llx c=%d d=%d v=%d g=%d] ",
width,
(entrylo0 << 6) & PAGE_MASK, c0,
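[Note] Both masking sites above now go through ASID_MASK() from the newly
included <asm/mmu_context.h> instead of open-coding 0xff. A sketch of the idea;
the fallback definition below is illustrative only, since the real macro tracks
the CPU's actual ASID field width:

#include <linux/printk.h>
#include <asm/mmu_context.h>		/* the real ASID_MASK() */

#ifndef ASID_MASK			/* illustrative fallback only */
#define ASID_MASK(v)	((v) & 0xff)	/* 8-bit ASID on R4000-style cores */
#endif

static void sketch_print_asid(unsigned long entryhi)
{
	/* was: printk(... entryhi & 0xff); now every dumper agrees */
	printk("asid=%02lx\n", ASID_MASK(entryhi));
}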
diff --git a/arch/mips/lib/memset.S b/arch/mips/lib/memset.S
index 053d3b0b0317..0580194e7402 100644
--- a/arch/mips/lib/memset.S
+++ b/arch/mips/lib/memset.S
@@ -5,7 +5,8 @@
*
* Copyright (C) 1998, 1999, 2000 by Ralf Baechle
* Copyright (C) 1999, 2000 Silicon Graphics, Inc.
- * Copyright (C) 2007 Maciej W. Rozycki
+ * Copyright (C) 2007 by Maciej W. Rozycki
+ * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
*/
#include <asm/asm.h>
#include <asm/asm-offsets.h>
@@ -19,6 +20,20 @@
#define LONG_S_R sdr
#endif
+#ifdef CONFIG_CPU_MICROMIPS
+#define STORSIZE (LONGSIZE * 2)
+#define STORMASK (STORSIZE - 1)
+#define FILL64RG t8
+#define FILLPTRG t7
+#undef LONG_S
+#define LONG_S LONG_SP
+#else
+#define STORSIZE LONGSIZE
+#define STORMASK LONGMASK
+#define FILL64RG a1
+#define FILLPTRG t0
+#endif
+
#define EX(insn,reg,addr,handler) \
9: insn reg, addr; \
.section __ex_table,"a"; \
@@ -26,23 +41,25 @@
.previous
.macro f_fill64 dst, offset, val, fixup
- EX(LONG_S, \val, (\offset + 0 * LONGSIZE)(\dst), \fixup)
- EX(LONG_S, \val, (\offset + 1 * LONGSIZE)(\dst), \fixup)
- EX(LONG_S, \val, (\offset + 2 * LONGSIZE)(\dst), \fixup)
- EX(LONG_S, \val, (\offset + 3 * LONGSIZE)(\dst), \fixup)
- EX(LONG_S, \val, (\offset + 4 * LONGSIZE)(\dst), \fixup)
- EX(LONG_S, \val, (\offset + 5 * LONGSIZE)(\dst), \fixup)
- EX(LONG_S, \val, (\offset + 6 * LONGSIZE)(\dst), \fixup)
- EX(LONG_S, \val, (\offset + 7 * LONGSIZE)(\dst), \fixup)
-#if LONGSIZE == 4
- EX(LONG_S, \val, (\offset + 8 * LONGSIZE)(\dst), \fixup)
- EX(LONG_S, \val, (\offset + 9 * LONGSIZE)(\dst), \fixup)
- EX(LONG_S, \val, (\offset + 10 * LONGSIZE)(\dst), \fixup)
- EX(LONG_S, \val, (\offset + 11 * LONGSIZE)(\dst), \fixup)
- EX(LONG_S, \val, (\offset + 12 * LONGSIZE)(\dst), \fixup)
- EX(LONG_S, \val, (\offset + 13 * LONGSIZE)(\dst), \fixup)
- EX(LONG_S, \val, (\offset + 14 * LONGSIZE)(\dst), \fixup)
- EX(LONG_S, \val, (\offset + 15 * LONGSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 0 * STORSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 1 * STORSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 2 * STORSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 3 * STORSIZE)(\dst), \fixup)
+#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
+ EX(LONG_S, \val, (\offset + 4 * STORSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 5 * STORSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 6 * STORSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 7 * STORSIZE)(\dst), \fixup)
+#endif
+#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
+ EX(LONG_S, \val, (\offset + 8 * STORSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 9 * STORSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
+ EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
#endif
.endm
@@ -71,16 +88,20 @@ LEAF(memset)
1:
FEXPORT(__bzero)
- sltiu t0, a2, LONGSIZE /* very small region? */
+ sltiu t0, a2, STORSIZE /* very small region? */
bnez t0, .Lsmall_memset
- andi t0, a0, LONGMASK /* aligned? */
+ andi t0, a0, STORMASK /* aligned? */
+#ifdef CONFIG_CPU_MICROMIPS
+ move t8, a1 /* used by 'swp' instruction */
+ move t9, a1
+#endif
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
beqz t0, 1f
- PTR_SUBU t0, LONGSIZE /* alignment in bytes */
+ PTR_SUBU t0, STORSIZE /* alignment in bytes */
#else
.set noat
- li AT, LONGSIZE
+ li AT, STORSIZE
beqz t0, 1f
PTR_SUBU t0, AT /* alignment in bytes */
.set at
@@ -99,24 +120,27 @@ FEXPORT(__bzero)
1: ori t1, a2, 0x3f /* # of full blocks */
xori t1, 0x3f
beqz t1, .Lmemset_partial /* no block to fill */
- andi t0, a2, 0x40-LONGSIZE
+ andi t0, a2, 0x40-STORSIZE
PTR_ADDU t1, a0 /* end address */
.set reorder
1: PTR_ADDIU a0, 64
R10KCBARRIER(0(ra))
- f_fill64 a0, -64, a1, .Lfwd_fixup
+ f_fill64 a0, -64, FILL64RG, .Lfwd_fixup
bne t1, a0, 1b
.set noreorder
.Lmemset_partial:
R10KCBARRIER(0(ra))
PTR_LA t1, 2f /* where to start */
+#ifdef CONFIG_CPU_MICROMIPS
+ LONG_SRL t7, t0, 1
+#endif
#if LONGSIZE == 4
- PTR_SUBU t1, t0
+ PTR_SUBU t1, FILLPTRG
#else
.set noat
- LONG_SRL AT, t0, 1
+ LONG_SRL AT, FILLPTRG, 1
PTR_SUBU t1, AT
.set at
#endif
@@ -126,9 +150,9 @@ FEXPORT(__bzero)
.set push
.set noreorder
.set nomacro
- f_fill64 a0, -64, a1, .Lpartial_fixup /* ... but first do longs ... */
+ f_fill64 a0, -64, FILL64RG, .Lpartial_fixup /* ... but first do longs ... */
2: .set pop
- andi a2, LONGMASK /* At most one long to go */
+ andi a2, STORMASK /* At most one long to go */
beqz a2, 1f
PTR_ADDU a0, a2 /* What's left */
@@ -169,7 +193,7 @@ FEXPORT(__bzero)
.Lpartial_fixup:
PTR_L t0, TI_TASK($28)
- andi a2, LONGMASK
+ andi a2, STORMASK
LONG_L t0, THREAD_BUADDR(t0)
LONG_ADDU a2, t1
jr ra
@@ -177,4 +201,4 @@ FEXPORT(__bzero)
.Llast_fixup:
jr ra
- andi v1, a2, LONGMASK
+ andi v1, a2, STORMASK
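[Note] The microMIPS additions above double the store granularity: STORSIZE
becomes two longs because the paired store reached through LONG_SP (the diff's
own comment names 'swp') writes the t8/t9 register pair, pre-loaded from a1, in
a single instruction, so f_fill64 needs half as many stores per 64-byte block.
A rough C model of the fill, illustrative only -- the real loop is
hand-scheduled assembly with an exception-table entry on every store:

#include <stddef.h>

#define LONGSIZE	sizeof(long)
#ifdef MODEL_MICROMIPS			/* stand-in for CONFIG_CPU_MICROMIPS */
#define STORSIZE	(LONGSIZE * 2)	/* paired store writes two longs */
#else
#define STORSIZE	LONGSIZE
#endif

static void model_fill64(unsigned char *dst, unsigned long val)
{
	size_t off;

	/* f_fill64 emits 64 / STORSIZE stores covering a 64-byte block */
	for (off = 0; off < 64; off += STORSIZE) {
		*(unsigned long *)(dst + off) = val;
		if (STORSIZE > LONGSIZE)	/* second long of the pair */
			*(unsigned long *)(dst + off + LONGSIZE) = val;
	}
}

This is also why the partial-block path computes its jump into the unrolled
sequence from t0 >> 1 (LONG_SRL t7, t0, 1): each store now covers twice the
bytes, so the entry offset halves.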
diff --git a/arch/mips/lib/mips-atomic.c b/arch/mips/lib/mips-atomic.c
index cd160be3ce4d..6807f7172eaf 100644
--- a/arch/mips/lib/mips-atomic.c
+++ b/arch/mips/lib/mips-atomic.c
@@ -13,6 +13,7 @@
#include <linux/compiler.h>
#include <linux/preempt.h>
#include <linux/export.h>
+#include <linux/stringify.h>
#if !defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_MIPS_MT_SMTC)
@@ -34,8 +35,11 @@
*
* Workaround: mask EXL bit of the result or place a nop before mfc0.
*/
-__asm__(
- " .macro arch_local_irq_disable\n"
+notrace void arch_local_irq_disable(void)
+{
+ preempt_disable();
+
+ __asm__ __volatile__(
" .set push \n"
" .set noat \n"
#ifdef CONFIG_MIPS_MT_SMTC
@@ -52,108 +56,98 @@ __asm__(
" .set noreorder \n"
" mtc0 $1,$12 \n"
#endif
- " irq_disable_hazard \n"
+ " " __stringify(__irq_disable_hazard) " \n"
" .set pop \n"
- " .endm \n");
+ : /* no outputs */
+ : /* no inputs */
+ : "memory");
-notrace void arch_local_irq_disable(void)
-{
- preempt_disable();
- __asm__ __volatile__(
- "arch_local_irq_disable"
- : /* no outputs */
- : /* no inputs */
- : "memory");
preempt_enable();
}
EXPORT_SYMBOL(arch_local_irq_disable);
-__asm__(
- " .macro arch_local_irq_save result \n"
+notrace unsigned long arch_local_irq_save(void)
+{
+ unsigned long flags;
+
+ preempt_disable();
+
+ __asm__ __volatile__(
" .set push \n"
" .set reorder \n"
" .set noat \n"
#ifdef CONFIG_MIPS_MT_SMTC
- " mfc0 \\result, $2, 1 \n"
- " ori $1, \\result, 0x400 \n"
+ " mfc0 %[flags], $2, 1 \n"
+ " ori $1, %[flags], 0x400 \n"
" .set noreorder \n"
" mtc0 $1, $2, 1 \n"
- " andi \\result, \\result, 0x400 \n"
+ " andi %[flags], %[flags], 0x400 \n"
#elif defined(CONFIG_CPU_MIPSR2)
/* see irqflags.h for inline function */
#else
- " mfc0 \\result, $12 \n"
- " ori $1, \\result, 0x1f \n"
+ " mfc0 %[flags], $12 \n"
+ " ori $1, %[flags], 0x1f \n"
" xori $1, 0x1f \n"
" .set noreorder \n"
" mtc0 $1, $12 \n"
#endif
- " irq_disable_hazard \n"
+ " " __stringify(__irq_disable_hazard) " \n"
" .set pop \n"
- " .endm \n");
+ : [flags] "=r" (flags)
+ : /* no inputs */
+ : "memory");
-notrace unsigned long arch_local_irq_save(void)
-{
- unsigned long flags;
- preempt_disable();
- asm volatile("arch_local_irq_save\t%0"
- : "=r" (flags)
- : /* no inputs */
- : "memory");
preempt_enable();
+
return flags;
}
EXPORT_SYMBOL(arch_local_irq_save);
+notrace void arch_local_irq_restore(unsigned long flags)
+{
+ unsigned long __tmp1;
+
+#ifdef CONFIG_MIPS_MT_SMTC
+ /*
+ * SMTC kernel needs to do a software replay of queued
+ * IPIs, at the cost of branch and call overhead on each
+ * local_irq_restore()
+ */
+ if (unlikely(!(flags & 0x0400)))
+ smtc_ipi_replay();
+#endif
+ preempt_disable();
-__asm__(
- " .macro arch_local_irq_restore flags \n"
+ __asm__ __volatile__(
" .set push \n"
" .set noreorder \n"
" .set noat \n"
#ifdef CONFIG_MIPS_MT_SMTC
- "mfc0 $1, $2, 1 \n"
- "andi \\flags, 0x400 \n"
- "ori $1, 0x400 \n"
- "xori $1, 0x400 \n"
- "or \\flags, $1 \n"
- "mtc0 \\flags, $2, 1 \n"
+ " mfc0 $1, $2, 1 \n"
+ " andi %[flags], 0x400 \n"
+ " ori $1, 0x400 \n"
+ " xori $1, 0x400 \n"
+ " or %[flags], $1 \n"
+ " mtc0 %[flags], $2, 1 \n"
#elif defined(CONFIG_CPU_MIPSR2) && defined(CONFIG_IRQ_CPU)
/* see irqflags.h for inline function */
#elif defined(CONFIG_CPU_MIPSR2)
/* see irqflags.h for inline function */
#else
" mfc0 $1, $12 \n"
- " andi \\flags, 1 \n"
+ " andi %[flags], 1 \n"
" ori $1, 0x1f \n"
" xori $1, 0x1f \n"
- " or \\flags, $1 \n"
- " mtc0 \\flags, $12 \n"
+ " or %[flags], $1 \n"
+ " mtc0 %[flags], $12 \n"
#endif
- " irq_disable_hazard \n"
+ " " __stringify(__irq_disable_hazard) " \n"
" .set pop \n"
- " .endm \n");
+ : [flags] "=r" (__tmp1)
+ : "0" (flags)
+ : "memory");
-notrace void arch_local_irq_restore(unsigned long flags)
-{
- unsigned long __tmp1;
-
-#ifdef CONFIG_MIPS_MT_SMTC
- /*
- * SMTC kernel needs to do a software replay of queued
- * IPIs, at the cost of branch and call overhead on each
- * local_irq_restore()
- */
- if (unlikely(!(flags & 0x0400)))
- smtc_ipi_replay();
-#endif
- preempt_disable();
- __asm__ __volatile__(
- "arch_local_irq_restore\t%0"
- : "=r" (__tmp1)
- : "0" (flags)
- : "memory");
preempt_enable();
}
EXPORT_SYMBOL(arch_local_irq_restore);
@@ -164,11 +158,36 @@ notrace void __arch_local_irq_restore(unsigned long flags)
unsigned long __tmp1;
preempt_disable();
+
__asm__ __volatile__(
- "arch_local_irq_restore\t%0"
- : "=r" (__tmp1)
- : "0" (flags)
- : "memory");
+ " .set push \n"
+ " .set noreorder \n"
+ " .set noat \n"
+#ifdef CONFIG_MIPS_MT_SMTC
+ " mfc0 $1, $2, 1 \n"
+ " andi %[flags], 0x400 \n"
+ " ori $1, 0x400 \n"
+ " xori $1, 0x400 \n"
+ " or %[flags], $1 \n"
+ " mtc0 %[flags], $2, 1 \n"
+#elif defined(CONFIG_CPU_MIPSR2) && defined(CONFIG_IRQ_CPU)
+ /* see irqflags.h for inline function */
+#elif defined(CONFIG_CPU_MIPSR2)
+ /* see irqflags.h for inline function */
+#else
+ " mfc0 $1, $12 \n"
+ " andi %[flags], 1 \n"
+ " ori $1, 0x1f \n"
+ " xori $1, 0x1f \n"
+ " or %[flags], $1 \n"
+ " mtc0 %[flags], $12 \n"
+#endif
+ " " __stringify(__irq_disable_hazard) " \n"
+ " .set pop \n"
+ : [flags] "=r" (__tmp1)
+ : "0" (flags)
+ : "memory");
+
preempt_enable();
}
EXPORT_SYMBOL(__arch_local_irq_restore);
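[Note] The rewrite above removes the file-scope assembler macros
(arch_local_irq_disable and friends) and inlines their bodies into each C
function as ordinary extended asm: macro arguments such as \result and \flags
become named operands (%[flags]), and the irq_disable_hazard pseudo-op is now
pasted in via __stringify(__irq_disable_hazard) from <asm/hazards.h>, which is
why <linux/stringify.h> joins the includes. A condensed sketch of the resulting
shape, limited to the plain non-SMTC, pre-R2 branch:

#include <linux/stringify.h>
#include <asm/hazards.h>		/* __irq_disable_hazard */

static unsigned long sketch_irq_save(void)
{
	unsigned long flags;

	__asm__ __volatile__(
	"	.set	push				\n"
	"	.set	noat				\n"
	"	mfc0	%[flags], $12			\n"	/* CP0 Status */
	"	ori	$1, %[flags], 0x1f		\n"
	"	xori	$1, 0x1f			\n"	/* clear IE and friends */
	"	mtc0	$1, $12				\n"
	"	" __stringify(__irq_disable_hazard) "	\n"
	"	.set	pop				\n"
	: [flags] "=r" (flags)
	: /* no inputs */
	: "memory");

	return flags;
}

Keeping the body inside each function means the compiler sees the operand
constraints directly, instead of trusting an assembler macro defined in a
separate asm() statement to still be in scope.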
diff --git a/arch/mips/lib/r3k_dump_tlb.c b/arch/mips/lib/r3k_dump_tlb.c
index 91615c2ef0cf..8327698b9937 100644
--- a/arch/mips/lib/r3k_dump_tlb.c
+++ b/arch/mips/lib/r3k_dump_tlb.c
@@ -9,6 +9,7 @@
#include <linux/mm.h>
#include <asm/mipsregs.h>
+#include <asm/mmu_context.h>
#include <asm/page.h>
#include <asm/pgtable.h>
#include <asm/tlbdebug.h>
@@ -21,7 +22,7 @@ static void dump_tlb(int first, int last)
unsigned int asid;
unsigned long entryhi, entrylo0;
- asid = read_c0_entryhi() & 0xfc0;
+ asid = ASID_MASK(read_c0_entryhi());
for (i = first; i <= last; i++) {
write_c0_index(i<<8);
@@ -35,7 +36,7 @@ static void dump_tlb(int first, int last)
/* Unused entries have a virtual address of KSEG0. */
if ((entryhi & 0xffffe000) != 0x80000000
- && (entryhi & 0xfc0) == asid) {
+ && (ASID_MASK(entryhi) == asid)) {
/*
* Only print entries in use
*/
@@ -44,7 +45,7 @@ static void dump_tlb(int first, int last)
printk("va=%08lx asid=%08lx"
" [pa=%06lx n=%d d=%d v=%d g=%d]",
(entryhi & 0xffffe000),
- entryhi & 0xfc0,
+ ASID_MASK(entryhi),
entrylo0 & PAGE_MASK,
(entrylo0 & (1 << 11)) ? 1 : 0,
(entrylo0 & (1 << 10)) ? 1 : 0,
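[Note] Same centralization as in dump_tlb.c, but here the open-coded mask was
0xfc0: the R3000 keeps its 6-bit ASID in EntryHi bits 11:6. A hypothetical
illustration (the R3K_-prefixed macro is a local stand-in, not the kernel's):

/* stand-in only -- the real ASID_MASK() in <asm/mmu_context.h> selects
 * the right field for the CPU family at build time */
#define R3K_ASID_MASK(entryhi)	((entryhi) & 0xfc0)	/* bits 11:6 */

static int sketch_entry_in_use(unsigned long entryhi, unsigned int asid)
{
	/* replaces the open-coded test: (entryhi & 0xfc0) == asid */
	return R3K_ASID_MASK(entryhi) == asid;
}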
diff --git a/arch/mips/lib/strlen_user.S b/arch/mips/lib/strlen_user.S
index fdbb970f670d..e362dcdc69d1 100644
--- a/arch/mips/lib/strlen_user.S
+++ b/arch/mips/lib/strlen_user.S
@@ -3,8 +3,9 @@
* License. See the file "COPYING" in the main directory of this archive
* for more details.
*
- * Copyright (c) 1996, 1998, 1999, 2004 by Ralf Baechle
- * Copyright (c) 1999 Silicon Graphics, Inc.
+ * Copyright (C) 1996, 1998, 1999, 2004 by Ralf Baechle
+ * Copyright (C) 1999 Silicon Graphics, Inc.
+ * Copyright (C) 2011 MIPS Technologies, Inc.
*/
#include <asm/asm.h>
#include <asm/asm-offsets.h>
@@ -28,9 +29,9 @@ LEAF(__strlen_user_asm)
FEXPORT(__strlen_user_nocheck_asm)
move v0, a0
-1: EX(lb, t0, (v0), .Lfault)
+1: EX(lbu, v1, (v0), .Lfault)
PTR_ADDIU v0, 1
- bnez t0, 1b
+ bnez v1, 1b
PTR_SUBU v0, a0
jr ra
END(__strlen_user_asm)
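[Note] The loop above now loads through lbu into v1 rather than lb into t0.
A C model of what the routine computes (the assembly additionally hangs an
exception-table fixup off the load, so a faulting user access returns 0):

static long model_strlen_user(const char *s)
{
	const char *p = s;
	unsigned char c;		/* lbu: zero-extended byte */

	do {
		c = *p++;		/* EX(lbu, v1, (v0), .Lfault) */
	} while (c != 0);

	return p - s;			/* includes the terminating NUL */
}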
diff --git a/arch/mips/lib/strncpy_user.S b/arch/mips/lib/strncpy_user.S
index bad539487503..92870b6b53ea 100644
--- a/arch/mips/lib/strncpy_user.S
+++ b/arch/mips/lib/strncpy_user.S
@@ -3,7 +3,8 @@
* License. See the file "COPYING" in the main directory of this archive
* for more details.
*
- * Copyright (c) 1996, 1999 by Ralf Baechle
+ * Copyright (C) 1996, 1999 by Ralf Baechle
+ * Copyright (C) 2011 MIPS Technologies, Inc.
*/
#include <linux/errno.h>
#include <asm/asm.h>
@@ -33,26 +34,27 @@ LEAF(__strncpy_from_user_asm)
bnez v0, .Lfault
FEXPORT(__strncpy_from_user_nocheck_asm)
- move v0, zero
- move v1, a1
.set noreorder
-1: EX(lbu, t0, (v1), .Lfault)
+ move t0, zero
+ move v1, a1
+1: EX(lbu, v0, (v1), .Lfault)
PTR_ADDIU v1, 1
R10KCBARRIER(0(ra))
- beqz t0, 2f
- sb t0, (a0)
- PTR_ADDIU v0, 1
- .set reorder
- PTR_ADDIU a0, 1
- bne v0, a2, 1b
-2: PTR_ADDU t0, a1, v0
- xor t0, a1
- bltz t0, .Lfault
+ beqz v0, 2f
+ sb v0, (a0)
+ PTR_ADDIU t0, 1
+ bne t0, a2, 1b
+ PTR_ADDIU a0, 1
+2: PTR_ADDU v0, a1, t0
+ xor v0, a1
+ bltz v0, .Lfault
+ nop
jr ra # return n
+ move v0, t0
END(__strncpy_from_user_asm)
-.Lfault: li v0, -EFAULT
- jr ra
+.Lfault: jr ra
+ li v0, -EFAULT
.section __ex_table,"a"
PTR 1b, .Lfault
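[Note] After the register reshuffle above, t0 carries the running count (the
eventual return value) and v0 the byte just loaded; the xor/bltz pair after the
loop still rejects a src + count that wrapped out of the user segment, and the
fault stub now fills the jr delay slot with the li of -EFAULT. A C model of the
copy loop, mirroring the assembly's test-at-bottom structure (fault handling
via the exception table is elided):

static long model_strncpy_from_user(char *dst, const char *src, long n)
{
	long copied = 0;		/* t0: count, becomes the return value */
	unsigned char c;		/* v0: byte just loaded */

	do {
		c = src[copied];	/* EX(lbu, ...): fault => -EFAULT */
		dst[copied] = c;	/* the NUL is stored ... */
		if (c == 0)
			break;		/* ... but not counted */
		copied++;
	} while (copied != n);

	return copied;			/* strlen-style count, NUL excluded */
}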
diff --git a/arch/mips/lib/strnlen_user.S b/arch/mips/lib/strnlen_user.S
index beea03c8c0ce..fcacea5e61f1 100644
--- a/arch/mips/lib/strnlen_user.S
+++ b/arch/mips/lib/strnlen_user.S
@@ -35,7 +35,7 @@ FEXPORT(__strnlen_user_nocheck_asm)
PTR_ADDU a1, a0 # stop pointer
1: beq v0, a1, 1f # limit reached?
EX(lb, t0, (v0), .Lfault)
- PTR_ADDU v0, 1
+ PTR_ADDIU v0, 1
bnez t0, 1b
1: PTR_SUBU v0, a0
jr ra