LoongArch: Adjust misc routines for 32BIT/64BIT

Adjust misc routines for both 32BIT and 64BIT, including: bitops, bswap,
checksum, string, jump label, unaligned access emulator, suspend/wakeup
routines, etc.

Reviewed-by: Arnd Bergmann <arnd@arndb.de>
Signed-off-by: Jiaxun Yang <jiaxun.yang@flygoat.com>
Signed-off-by: Huacai Chen <chenhuacai@loongson.cn>
Branch: master
Huacai Chen committed 2025-12-08 18:09:17 +08:00
Commit b15dfdacd9 (parent 48c7294775)
10 changed files with 151 additions and 82 deletions

@@ -13,11 +13,22 @@
#include <asm/barrier.h>
#ifdef CONFIG_32BIT_REDUCED
#include <asm-generic/bitops/ffs.h>
#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/__ffs.h>
#include <asm-generic/bitops/__fls.h>
#else /* CONFIG_32BIT_STANDARD || CONFIG_64BIT */
#include <asm-generic/bitops/builtin-ffs.h>
#include <asm-generic/bitops/builtin-fls.h>
#include <asm-generic/bitops/builtin-__ffs.h>
#include <asm-generic/bitops/builtin-__fls.h>
#endif
#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/fls64.h>
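
For context on the include split above: the builtin-* headers wrap the compiler's ffs/fls intrinsics, while the plain asm-generic headers are open-coded C, presumably chosen for CONFIG_32BIT_REDUCED because that configuration cannot count on the intrinsics expanding to native bit-scan instructions. A minimal sketch of the difference (illustrative only, not the real headers):

/* Sketch only, not the kernel's asm-generic headers: contrast the
 * builtin flavour (CONFIG_32BIT_STANDARD / CONFIG_64BIT) with the
 * open-coded flavour (CONFIG_32BIT_REDUCED). */
static inline int sketch_builtin_ffs(int x)
{
	/* builtin-ffs.h style: defer to the compiler intrinsic. */
	return __builtin_ffs(x);
}

static inline int sketch_generic_ffs(int x)
{
	/* asm-generic/bitops/ffs.h style: plain C, no intrinsic needed. */
	int r = 1;

	if (!x)
		return 0;
	while (!(x & 1)) {
		x >>= 1;
		r++;
	}
	return r;
}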

@@ -9,6 +9,8 @@
#include <linux/bitops.h>
#include <linux/in6.h>
#ifdef CONFIG_64BIT
#define _HAVE_ARCH_IPV6_CSUM
__sum16 csum_ipv6_magic(const struct in6_addr *saddr,
const struct in6_addr *daddr,
@@ -61,6 +63,8 @@ static inline __sum16 ip_fast_csum(const void *iph, unsigned int ihl)
extern unsigned int do_csum(const unsigned char *buff, int len);
#define do_csum do_csum
#endif
#include <asm-generic/checksum.h>
#endif /* __ASM_CHECKSUM_H */
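
With the architecture-specific helpers now guarded by CONFIG_64BIT, a 32-bit build picks up the generic C implementations behind <asm-generic/checksum.h> instead. As a rough, standalone illustration of the one's-complement arithmetic those generic helpers perform (not the kernel code itself):

#include <stdint.h>
#include <stddef.h>

/* Standalone sketch of the RFC 1071 style one's-complement checksum
 * that the generic fallback computes in plain C; the real kernel
 * helpers use __wsum/__sum16 types and a more optimised inner loop. */
static uint16_t sketch_ip_checksum(const void *data, size_t len)
{
	const uint8_t *p = data;
	uint32_t sum = 0;

	while (len > 1) {			/* sum 16-bit words */
		sum += (uint32_t)((p[0] << 8) | p[1]);
		p += 2;
		len -= 2;
	}
	if (len)				/* pad a trailing odd byte */
		sum += (uint32_t)p[0] << 8;
	while (sum >> 16)			/* fold carries back in */
		sum = (sum & 0xffff) + (sum >> 16);

	return (uint16_t)~sum;
}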

@@ -10,15 +10,23 @@
#ifndef __ASSEMBLER__
#include <linux/types.h>
#include <linux/stringify.h>
#include <asm/asm.h>
#define JUMP_LABEL_NOP_SIZE 4
#ifdef CONFIG_32BIT
#define JUMP_LABEL_TYPE ".long "
#else
#define JUMP_LABEL_TYPE ".quad "
#endif
/* This macro is also expanded on the Rust side. */
#define JUMP_TABLE_ENTRY(key, label) \
".pushsection __jump_table, \"aw\" \n\t" \
".align 3 \n\t" \
".align " __stringify(PTRLOG) " \n\t" \
".long 1b - ., " label " - . \n\t" \
".quad " key " - . \n\t" \
JUMP_LABEL_TYPE key " - . \n\t" \
".popsection \n\t"
#define ARCH_STATIC_BRANCH_ASM(key, label) \
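
The JUMP_TABLE_ENTRY record above consists of two 32-bit PC-relative offsets (code site and branch target) followed by a PC-relative key reference of native long width, which is why the key directive switches between .long and .quad and the entry is aligned to PTRLOG. Roughly, each emitted entry corresponds to a record like this (sketch, assuming the relative jump-entry layout implied by the offsets above):

/* Sketch of one jump table entry as emitted by JUMP_TABLE_ENTRY.
 * "long" is 4 bytes under CONFIG_32BIT (".long") and 8 bytes under
 * CONFIG_64BIT (".quad"), matching the directive selected above. */
struct sketch_jump_entry {
	int  code;	/* PC-relative offset of the patched branch site */
	int  target;	/* PC-relative offset of the branch target label */
	long key;	/* PC-relative offset of the static key          */
};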

@@ -5,6 +5,7 @@
#ifndef _ASM_STRING_H
#define _ASM_STRING_H
#ifdef CONFIG_64BIT
#define __HAVE_ARCH_MEMSET
extern void *memset(void *__s, int __c, size_t __count);
extern void *__memset(void *__s, int __c, size_t __count);
@@ -16,6 +17,7 @@ extern void *__memcpy(void *__to, __const__ void *__from, size_t __n);
#define __HAVE_ARCH_MEMMOVE
extern void *memmove(void *__dest, __const__ void *__src, size_t __n);
extern void *__memmove(void *__dest, __const__ void *__src, size_t __n);
#endif
#if defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__)
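
Leaving __HAVE_ARCH_MEMSET/MEMCPY/MEMMOVE undefined on 32-bit means the generic C routines from lib/string.c are used in place of the hand-written assembly. Those generic routines are essentially simple byte loops, along these lines (sketch, not the kernel source):

#include <stddef.h>

/* Sketch of the byte-wise generic fallback that applies when the
 * __HAVE_ARCH_* overrides above are not defined. */
static void *sketch_memset(void *s, int c, size_t count)
{
	char *p = s;

	while (count--)
		*p++ = (char)c;
	return s;
}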

@@ -27,12 +27,21 @@ static u32 unaligned_instructions_user;
static u32 unaligned_instructions_kernel;
#endif
static inline unsigned long read_fpr(unsigned int idx)
static inline u64 read_fpr(unsigned int idx)
{
#ifdef CONFIG_64BIT
#define READ_FPR(idx, __value) \
__asm__ __volatile__("movfr2gr.d %0, $f"#idx"\n\t" : "=r"(__value));
unsigned long __value;
#else
#define READ_FPR(idx, __value) \
{ \
u32 __value_lo, __value_hi; \
__asm__ __volatile__("movfr2gr.s %0, $f"#idx"\n\t" : "=r"(__value_lo)); \
__asm__ __volatile__("movfrh2gr.s %0, $f"#idx"\n\t" : "=r"(__value_hi)); \
__value = (__value_lo | ((u64)__value_hi << 32)); \
}
#endif
u64 __value;
switch (idx) {
case 0:
@@ -138,11 +147,20 @@ static inline unsigned long read_fpr(unsigned int idx)
return __value;
}
static inline void write_fpr(unsigned int idx, unsigned long value)
static inline void write_fpr(unsigned int idx, u64 value)
{
#ifdef CONFIG_64BIT
#define WRITE_FPR(idx, value) \
__asm__ __volatile__("movgr2fr.d $f"#idx", %0\n\t" :: "r"(value));
#else
#define WRITE_FPR(idx, value) \
{ \
u32 value_lo = value; \
u32 value_hi = value >> 32; \
__asm__ __volatile__("movgr2fr.w $f"#idx", %0\n\t" :: "r"(value_lo)); \
__asm__ __volatile__("movgr2frh.w $f"#idx", %0\n\t" :: "r"(value_hi)); \
}
#endif
switch (idx) {
case 0:
WRITE_FPR(0, value);
@@ -252,7 +270,7 @@ void emulate_load_store_insn(struct pt_regs *regs, void __user *addr, unsigned i
bool sign, write;
bool user = user_mode(regs);
unsigned int res, size = 0;
unsigned long value = 0;
u64 value = 0;
union loongarch_instruction insn;
perf_sw_event(PERF_COUNT_SW_EMULATION_FAULTS, 1, regs, 0);
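
On 32-bit there is no single instruction to move a 64-bit FPR to a GPR, so the READ_FPR/WRITE_FPR variants above transfer the low and high words separately and recombine them. The same recombination expressed in plain C (illustrative only):

#include <stdint.h>

/* Illustrative only: the split/combine performed by the 32-bit
 * READ_FPR (movfr2gr.s + movfrh2gr.s) and WRITE_FPR
 * (movgr2fr.w + movgr2frh.w) macro variants above. */
static uint64_t combine_halves(uint32_t lo, uint32_t hi)
{
	return (uint64_t)lo | ((uint64_t)hi << 32);
}

static void split_halves(uint64_t value, uint32_t *lo, uint32_t *hi)
{
	*lo = (uint32_t)value;
	*hi = (uint32_t)(value >> 32);
}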

@@ -0,0 +1,13 @@
// SPDX-License-Identifier: GPL-2.0
#include <linux/export.h>
#include <linux/compiler.h>
#include <uapi/linux/swab.h>
/* To silence -Wmissing-prototypes. */
unsigned long long __bswapdi2(unsigned long long u);
unsigned long long notrace __bswapdi2(unsigned long long u)
{
return ___constant_swab64(u);
}
EXPORT_SYMBOL(__bswapdi2);

@@ -0,0 +1,13 @@
// SPDX-License-Identifier: GPL-2.0
#include <linux/export.h>
#include <linux/compiler.h>
#include <uapi/linux/swab.h>
/* To silence -Wmissing-prototypes. */
unsigned int __bswapsi2(unsigned int u);
unsigned int notrace __bswapsi2(unsigned int u)
{
return ___constant_swab32(u);
}
EXPORT_SYMBOL(__bswapsi2);
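
These two helpers are presumably needed because, without a byte-swap instruction pattern, the compiler can lower __builtin_bswap32/__builtin_bswap64 into calls to __bswapsi2/__bswapdi2, which would normally come from libgcc; the kernel does not link libgcc, so it supplies its own. The swab expansion they return is the usual shift-and-mask form, for example:

/* Worked example of the 32-bit swab expansion used above;
 * e.g. sketch_swab32(0x12345678) == 0x78563412. */
static unsigned int sketch_swab32(unsigned int x)
{
	return ((x & 0x000000ffU) << 24) |
	       ((x & 0x0000ff00U) <<  8) |
	       ((x & 0x00ff0000U) >>  8) |
	       ((x & 0xff000000U) >> 24);
}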

@@ -24,35 +24,35 @@
* a3: sign
*/
SYM_FUNC_START(unaligned_read)
beqz a2, 5f
beqz a2, 5f
li.w t2, 0
addi.d t0, a2, -1
slli.d t1, t0, 3
add.d a0, a0, t0
li.w t2, 0
LONG_ADDI t0, a2, -1
PTR_SLLI t1, t0, LONGLOG
PTR_ADD a0, a0, t0
beqz a3, 2f
1: ld.b t3, a0, 0
b 3f
beqz a3, 2f
1: ld.b t3, a0, 0
b 3f
2: ld.bu t3, a0, 0
3: sll.d t3, t3, t1
or t2, t2, t3
addi.d t1, t1, -8
addi.d a0, a0, -1
addi.d a2, a2, -1
bgtz a2, 2b
4: st.d t2, a1, 0
2: ld.bu t3, a0, 0
3: LONG_SLLV t3, t3, t1
or t2, t2, t3
LONG_ADDI t1, t1, -8
PTR_ADDI a0, a0, -1
PTR_ADDI a2, a2, -1
bgtz a2, 2b
4: LONG_S t2, a1, 0
move a0, a2
jr ra
move a0, a2
jr ra
5: li.w a0, -EFAULT
jr ra
5: li.w a0, -EFAULT
jr ra
_asm_extable 1b, .L_fixup_handle_unaligned
_asm_extable 2b, .L_fixup_handle_unaligned
_asm_extable 4b, .L_fixup_handle_unaligned
_asm_extable 1b, .L_fixup_handle_unaligned
_asm_extable 2b, .L_fixup_handle_unaligned
_asm_extable 4b, .L_fixup_handle_unaligned
SYM_FUNC_END(unaligned_read)
/*
@@ -63,21 +63,21 @@ SYM_FUNC_END(unaligned_read)
* a2: n
*/
SYM_FUNC_START(unaligned_write)
beqz a2, 3f
beqz a2, 3f
li.w t0, 0
1: srl.d t1, a1, t0
2: st.b t1, a0, 0
addi.d t0, t0, 8
addi.d a2, a2, -1
addi.d a0, a0, 1
bgtz a2, 1b
li.w t0, 0
1: LONG_SRLV t1, a1, t0
2: st.b t1, a0, 0
LONG_ADDI t0, t0, 8
PTR_ADDI a2, a2, -1
PTR_ADDI a0, a0, 1
bgtz a2, 1b
move a0, a2
jr ra
move a0, a2
jr ra
3: li.w a0, -EFAULT
jr ra
3: li.w a0, -EFAULT
jr ra
_asm_extable 2b, .L_fixup_handle_unaligned
_asm_extable 2b, .L_fixup_handle_unaligned
SYM_FUNC_END(unaligned_write)
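
The conversion above replaces explicit .d operations with width-agnostic assembler macros (REG_S/REG_L plus the LONG_* and PTR_* families), which expand to the .w forms on 32-bit and the .d forms on 64-bit so the same source assembles for either register width. A sketch of the idea behind such definitions (not the real kernel header):

/* Sketch only, not the actual macro definitions: each width-agnostic
 * name expands to the instruction form matching the configured
 * register width, so .S sources need no #ifdef at every use site. */
#ifdef CONFIG_64BIT
#define SKETCH_REG_S	st.d	/* store one full register      */
#define SKETCH_REG_L	ld.d	/* load one full register       */
#define SKETCH_PTR_ADDI	addi.d	/* pointer-sized add immediate  */
#else
#define SKETCH_REG_S	st.w
#define SKETCH_REG_L	ld.w
#define SKETCH_PTR_ADDI	addi.w
#endif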

@@ -72,10 +72,10 @@ static int __init loongson3_acpi_suspend_init(void)
status = acpi_evaluate_integer(NULL, "\\SADR", NULL, &suspend_addr);
if (ACPI_FAILURE(status) || !suspend_addr) {
pr_info("ACPI S3 supported with hardware register default\n");
loongson_sysconf.suspend_addr = (u64)default_suspend_addr;
loongson_sysconf.suspend_addr = (unsigned long)default_suspend_addr;
} else {
pr_info("ACPI S3 supported with Loongson ACPI SADR extension\n");
loongson_sysconf.suspend_addr = (u64)phys_to_virt(PHYSADDR(suspend_addr));
loongson_sysconf.suspend_addr = (unsigned long)phys_to_virt(PHYSADDR(suspend_addr));
}
#endif
return 0;
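
Both assignments now cast through unsigned long rather than u64, keeping the integer width equal to the pointer width on 32-bit and 64-bit builds alike (unsigned long is 32 bits on the LP32 ABI and 64 bits on LP64). A trivial illustration of that width-safe idiom (sketch only):

/* Sketch only: unsigned long matches the pointer width on both the
 * 32-bit and 64-bit Linux ABIs, so the conversion is lossless on
 * either, whereas a fixed u64 cast only matches pointers on 64-bit. */
static unsigned long sketch_ptr_to_ulong(const void *p)
{
	return (unsigned long)p;
}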

@@ -14,41 +14,41 @@
/* preparatory stuff */
.macro SETUP_SLEEP
addi.d sp, sp, -PT_SIZE
st.d $r1, sp, PT_R1
st.d $r2, sp, PT_R2
st.d $r3, sp, PT_R3
st.d $r4, sp, PT_R4
st.d $r21, sp, PT_R21
st.d $r22, sp, PT_R22
st.d $r23, sp, PT_R23
st.d $r24, sp, PT_R24
st.d $r25, sp, PT_R25
st.d $r26, sp, PT_R26
st.d $r27, sp, PT_R27
st.d $r28, sp, PT_R28
st.d $r29, sp, PT_R29
st.d $r30, sp, PT_R30
st.d $r31, sp, PT_R31
PTR_ADDI sp, sp, -PT_SIZE
REG_S $r1, sp, PT_R1
REG_S $r2, sp, PT_R2
REG_S $r3, sp, PT_R3
REG_S $r4, sp, PT_R4
REG_S $r21, sp, PT_R21
REG_S $r22, sp, PT_R22
REG_S $r23, sp, PT_R23
REG_S $r24, sp, PT_R24
REG_S $r25, sp, PT_R25
REG_S $r26, sp, PT_R26
REG_S $r27, sp, PT_R27
REG_S $r28, sp, PT_R28
REG_S $r29, sp, PT_R29
REG_S $r30, sp, PT_R30
REG_S $r31, sp, PT_R31
.endm
.macro SETUP_WAKEUP
ld.d $r1, sp, PT_R1
ld.d $r2, sp, PT_R2
ld.d $r3, sp, PT_R3
ld.d $r4, sp, PT_R4
ld.d $r21, sp, PT_R21
ld.d $r22, sp, PT_R22
ld.d $r23, sp, PT_R23
ld.d $r24, sp, PT_R24
ld.d $r25, sp, PT_R25
ld.d $r26, sp, PT_R26
ld.d $r27, sp, PT_R27
ld.d $r28, sp, PT_R28
ld.d $r29, sp, PT_R29
ld.d $r30, sp, PT_R30
ld.d $r31, sp, PT_R31
addi.d sp, sp, PT_SIZE
REG_L $r1, sp, PT_R1
REG_L $r2, sp, PT_R2
REG_L $r3, sp, PT_R3
REG_L $r4, sp, PT_R4
REG_L $r21, sp, PT_R21
REG_L $r22, sp, PT_R22
REG_L $r23, sp, PT_R23
REG_L $r24, sp, PT_R24
REG_L $r25, sp, PT_R25
REG_L $r26, sp, PT_R26
REG_L $r27, sp, PT_R27
REG_L $r28, sp, PT_R28
REG_L $r29, sp, PT_R29
REG_L $r30, sp, PT_R30
REG_L $r31, sp, PT_R31
PTR_ADDI sp, sp, PT_SIZE
.endm
.text
@@ -59,15 +59,15 @@ SYM_FUNC_START(loongarch_suspend_enter)
SETUP_SLEEP
la.pcrel t0, acpi_saved_sp
st.d sp, t0, 0
REG_S sp, t0, 0
bl __flush_cache_all
/* Pass RA and SP to BIOS */
addi.d a1, sp, 0
PTR_ADDI a1, sp, 0
la.pcrel a0, loongarch_wakeup_start
la.pcrel t0, loongarch_suspend_addr
ld.d t0, t0, 0
REG_L t0, t0, 0
jirl ra, t0, 0 /* Call BIOS's STR sleep routine */
/*
@@ -83,7 +83,7 @@ SYM_INNER_LABEL(loongarch_wakeup_start, SYM_L_GLOBAL)
csrwr t0, LOONGARCH_CSR_CRMD
la.pcrel t0, acpi_saved_sp
ld.d sp, t0, 0
REG_L sp, t0, 0
SETUP_WAKEUP
jr ra