feat: arm64: generic implementation of vector irq #9336

Merged: 1 commit, Aug 27, 2024
8 changes: 8 additions & 0 deletions libcpu/aarch64/common/include/asm-generic.h
@@ -23,6 +23,14 @@
.cfi_endproc; \
.size name, .-name;

#define TRACE_SYMBOL(name)

.macro NEVER_RETURN
#ifdef RT_USING_DEBUG
b .
#endif /* RT_USING_DEBUG */
.endm

.macro GET_THREAD_SELF, dst:req
#ifdef ARCH_USING_HW_THREAD_SELF
mrs x0, tpidr_el1
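NEVER_RETURN is meant to sit after a context restore that must never fall through: with RT_USING_DEBUG it parks the core on a self-branch so the bug is caught in a debugger, otherwise it expands to nothing. A rough C analogue, purely illustrative (the real macro stays in assembly):

    #ifdef RT_USING_DEBUG
    /* debug builds: spin forever so an impossible return is immediately visible */
    #define NEVER_RETURN()  do { for (;;) { } } while (0)
    #else
    /* release builds: expands to nothing, just like the assembly macro */
    #define NEVER_RETURN()  do { } while (0)
    #endif /* RT_USING_DEBUG */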
70 changes: 19 additions & 51 deletions libcpu/aarch64/common/include/vector_gcc.h
@@ -6,6 +6,7 @@
* Change Logs:
* Date Author Notes
* 2024-03-28 Shell Move vector handling codes from context_gcc.S
* 2024-04-08 Shell Optimizing exception switch between u-space/kernel,
*/

#ifndef __ARM64_INC_VECTOR_H__
@@ -45,8 +46,6 @@
mrs x2, elr_el1

stp x2, x3, [sp, #-0x10]!

mov x0, sp /* Move SP into X0 for saving. */
.endm

#ifdef RT_USING_SMP
@@ -55,60 +54,29 @@
#include "../up/context_gcc.h"
#endif

.macro RESTORE_IRQ_CONTEXT_WITHOUT_MMU_SWITCH
/* the SP is already ok */
ldp x2, x3, [sp], #0x10 /* SPSR and ELR. */

tst x3, #0x1f
msr spsr_el1, x3
msr elr_el1, x2

ldp x29, x30, [sp], #0x10
msr sp_el0, x29
ldp x28, x29, [sp], #0x10
msr fpcr, x28
msr fpsr, x29
ldp x28, x29, [sp], #0x10
ldp x26, x27, [sp], #0x10
ldp x24, x25, [sp], #0x10
ldp x22, x23, [sp], #0x10
ldp x20, x21, [sp], #0x10
ldp x18, x19, [sp], #0x10
ldp x16, x17, [sp], #0x10
ldp x14, x15, [sp], #0x10
ldp x12, x13, [sp], #0x10
ldp x10, x11, [sp], #0x10
ldp x8, x9, [sp], #0x10
ldp x6, x7, [sp], #0x10
ldp x4, x5, [sp], #0x10
ldp x2, x3, [sp], #0x10
ldp x0, x1, [sp], #0x10
RESTORE_FPU sp
.macro SAVE_USER_CTX, eframex, tmpx
#ifdef RT_USING_SMART
beq arch_ret_to_user
#endif
eret
.endm

.macro SAVE_USER_CTX
mrs x1, spsr_el1
and x1, x1, 0xf
cmp x1, xzr

bne 1f
bl lwp_uthread_ctx_save
ldp x0, x1, [sp]
mrs \tmpx, spsr_el1
and \tmpx, \tmpx, 0xf
cbz \tmpx, 1f
b 2f
1:
mov x0, \eframex
bl lwp_uthread_ctx_save
2:
#endif /* RT_USING_SMART */
.endm

.macro RESTORE_USER_CTX, ctx
ldr x1, [\ctx, #CONTEXT_OFFSET_SPSR_EL1]
and x1, x1, 0x1f
cmp x1, xzr

bne 1f
bl lwp_uthread_ctx_restore
.macro RESTORE_USER_CTX, eframex, tmpx
#ifdef RT_USING_SMART
ldr \tmpx, [\eframex, #CONTEXT_OFFSET_SPSR_EL1]
and \tmpx, \tmpx, 0x1f
cbz \tmpx, 1f
b 2f
1:
bl lwp_uthread_ctx_restore
2:
#endif /* RT_USING_SMART */
.endm

#endif /* __ARM64_INC_VECTOR_H__ */
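The reworked SAVE_USER_CTX/RESTORE_USER_CTX now take the exception-frame register and a scratch register explicitly, and only call the lwp uthread hooks when the mode bits of SPSR_EL1 show the trap came from EL0 (user mode). A minimal C sketch of that decision; the hook prototypes here are assumptions, the diff only shows the assembly labels:

    #include <stdint.h>

    void lwp_uthread_ctx_save(void *eframe);     /* assumed prototype (RT_USING_SMART) */
    void lwp_uthread_ctx_restore(void *eframe);  /* assumed prototype (RT_USING_SMART) */

    /* SPSR_EL1.M[3:0] == 0 means the exception was taken from EL0t (user mode) */
    static int trapped_from_user(uint64_t spsr_el1)
    {
        return (spsr_el1 & 0xfUL) == 0;
    }

    static void save_user_ctx(void *eframe, uint64_t spsr_el1)
    {
    #ifdef RT_USING_SMART
        if (trapped_from_user(spsr_el1))
            lwp_uthread_ctx_save(eframe);        /* kernel-mode traps skip the hook */
    #else
        (void)eframe; (void)spsr_el1;
    #endif
    }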
2 changes: 2 additions & 0 deletions libcpu/aarch64/common/mp/context_gcc.S
@@ -129,5 +129,7 @@ rt_hw_context_switch_interrupt:
b _context_switch_exit

_context_switch_exit:
.local _context_switch_exit

clrex
RESTORE_CONTEXT_SWITCH
36 changes: 9 additions & 27 deletions libcpu/aarch64/common/mp/vector_gcc.S
@@ -12,41 +12,23 @@
#define __ASSEMBLY__
#endif

#include "../include/vector_gcc.h"
#include "vector_gcc.h"
#include "context_gcc.h"

.section .text

.globl vector_fiq
vector_fiq:
.globl vector_fiq
b .

.globl rt_hw_irq_exit

START_POINT(vector_irq)
SAVE_IRQ_CONTEXT
stp x0, x1, [sp, #-0x10]! /* X0 is thread sp */

bl rt_interrupt_enter
ldp x0, x1, [sp]

#ifdef RT_USING_SMART
SAVE_USER_CTX
#endif /* RT_USING_SMART */

bl rt_hw_trap_irq

#ifdef RT_USING_SMART
ldp x0, x1, [sp]
RESTORE_USER_CTX x0
#endif /* RT_USING_SMART */

bl rt_interrupt_leave
/**
* void rt_hw_vector_irq_sched(void *eframe)
* @brief do IRQ scheduling
*/
rt_hw_vector_irq_sched:
.globl rt_hw_vector_irq_sched

ldp x0, x1, [sp], #0x10
bl rt_scheduler_do_irq_switch

rt_hw_irq_exit:
RESTORE_IRQ_CONTEXT

START_POINT_END(vector_irq)
b rt_hw_irq_exit
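On SMP the scheduling hook is just a thin trampoline: it forwards the exception frame to the scheduler and then joins the common exit path. A hedged C view of the routine above (prototypes assumed from how the assembly passes x0):

    void rt_scheduler_do_irq_switch(void *context);  /* existing SMP scheduler entry */

    void rt_hw_vector_irq_sched(void *eframe)
    {
        /* may pick another thread; in that case the switch leaves through
         * that thread's saved context rather than returning here */
        rt_scheduler_do_irq_switch(eframe);

        /* otherwise fall through to rt_hw_irq_exit, which restores eframe */
    }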
3 changes: 3 additions & 0 deletions libcpu/aarch64/common/up/context_gcc.S
@@ -45,6 +45,7 @@ rt_hw_context_switch_to:
clrex
ldr x0, [x0]
RESTORE_CONTEXT_SWITCH x0
NEVER_RETURN

/*
* void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to);
@@ -62,6 +63,7 @@ rt_hw_context_switch:
ldr x0, [x1] // get new task stack pointer

RESTORE_CONTEXT_SWITCH x0
NEVER_RETURN

.globl rt_thread_switch_interrupt_flag
.globl rt_interrupt_from_thread
@@ -115,3 +117,4 @@ rt_hw_context_switch_interrupt_do:
ldr x0, [x4] // get new task's stack pointer

RESTORE_CONTEXT_SWITCH x0
NEVER_RETURN
29 changes: 13 additions & 16 deletions libcpu/aarch64/common/up/vector_gcc.S
@@ -22,28 +22,24 @@

.section .text

vector_fiq:
.align 8
.globl vector_fiq
vector_fiq:

SAVE_IRQ_CONTEXT
bl rt_hw_trap_fiq
RESTORE_IRQ_CONTEXT

.globl rt_interrupt_enter
.globl rt_interrupt_leave
.globl rt_thread_switch_interrupt_flag
.globl rt_interrupt_from_thread
.globl rt_interrupt_to_thread
.globl rt_hw_context_switch_interrupt_do

/**
* void rt_hw_vector_irq_sched(void *eframe)
* @brief do IRQ scheduling
*/
rt_hw_vector_irq_sched:
.globl rt_hw_vector_irq_sched
.align 8
.globl vector_irq
vector_irq:
SAVE_IRQ_CONTEXT

bl rt_interrupt_enter
bl rt_hw_trap_irq
bl rt_interrupt_leave

/**
* if rt_thread_switch_interrupt_flag set, jump to
@@ -52,12 +48,13 @@ vector_irq:
ldr x1, =rt_thread_switch_interrupt_flag
ldr x2, [x1]
cmp x2, #1
b.ne vector_irq_exit
bne 1f

mov x2, #0 // clear flag
/* clear flag */
mov x2, #0
str x2, [x1]

bl rt_hw_context_switch_interrupt_do

vector_irq_exit:
RESTORE_IRQ_CONTEXT_WITHOUT_MMU_SWITCH
1:
b rt_hw_irq_exit
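The UP variant keeps the classic flag handshake: if rt_thread_switch_interrupt_flag was set by a switch request made inside the ISR, clear it and run the deferred switch, otherwise go straight to rt_hw_irq_exit. A hedged C rendering of the logic (the type and the _do prototype are assumptions; both symbols are assembly-side):

    extern unsigned long rt_thread_switch_interrupt_flag;    /* assumed type */
    void rt_hw_context_switch_interrupt_do(void);            /* assembly routine, assumed prototype */

    void rt_hw_vector_irq_sched(void *eframe)
    {
        (void)eframe;   /* the UP path works on the frame already on the stack */

        if (rt_thread_switch_interrupt_flag == 1)
        {
            /* clear flag */
            rt_thread_switch_interrupt_flag = 0;
            rt_hw_context_switch_interrupt_do();   /* performs the deferred switch */
        }

        /* either way, control ends up in rt_hw_irq_exit to restore the frame */
    }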
58 changes: 43 additions & 15 deletions libcpu/aarch64/common/vector_gcc.S
@@ -67,32 +67,60 @@ system_vectors:
b vector_serror

#include "include/vector_gcc.h"
#define EFRAMEX x19

START_POINT(vector_exception)
SAVE_IRQ_CONTEXT
stp x0, x1, [sp, #-0x10]!
#ifdef RT_USING_SMART
SAVE_USER_CTX
#endif
mov EFRAMEX, sp

SAVE_USER_CTX EFRAMEX, x0

mov x0, EFRAMEX
bl rt_hw_trap_exception
#ifdef RT_USING_SMART
ldp x0, x1, [sp]
RESTORE_USER_CTX x0
#endif
RESTORE_USER_CTX EFRAMEX, x0

ldp x0, x1, [sp], #0x10
RESTORE_IRQ_CONTEXT_WITHOUT_MMU_SWITCH
RESTORE_IRQ_CONTEXT
START_POINT_END(vector_exception)

START_POINT(vector_serror)
SAVE_IRQ_CONTEXT
mov EFRAMEX, sp

#ifdef RT_USING_SMART
SAVE_USER_CTX
#endif
SAVE_USER_CTX EFRAMEX, x0

stp x0, x1, [sp, #-0x10]!
mov x0, EFRAMEX
bl rt_hw_trap_serror
b .

RESTORE_USER_CTX EFRAMEX, x0

NEVER_RETURN
START_POINT_END(vector_serror)

START_POINT(vector_irq)
SAVE_IRQ_CONTEXT
mov EFRAMEX, sp

/* trace IRQ level */
bl rt_interrupt_enter

SAVE_USER_CTX EFRAMEX, x0

/* handle IRQ */
mov x0, EFRAMEX
bl rt_hw_trap_irq

RESTORE_USER_CTX EFRAMEX, x0

/* restore IRQ level */
bl rt_interrupt_leave

mov x0, EFRAMEX
bl rt_hw_vector_irq_sched

b rt_hw_irq_exit
START_POINT_END(vector_irq)

rt_hw_irq_exit:
.globl rt_hw_irq_exit

RESTORE_IRQ_CONTEXT
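
Taken together, the generic vector_irq gives every port the same top half: bump the interrupt nesting level, save the user context if needed, dispatch the IRQ, undo both, and hand the frame to rt_hw_vector_irq_sched (SMP or UP variant above) before falling into rt_hw_irq_exit. A hedged C outline of that sequence; the wrapper name, the trap_from_el0 flag and the hook prototypes are illustrative only:

    void rt_interrupt_enter(void);
    void rt_interrupt_leave(void);
    void rt_hw_trap_irq(void);                   /* the assembly also passes eframe in x0 */
    void rt_hw_vector_irq_sched(void *eframe);
    void lwp_uthread_ctx_save(void *eframe);     /* assumed prototypes (RT_USING_SMART) */
    void lwp_uthread_ctx_restore(void *eframe);

    static void vector_irq_flow(void *eframe, int trap_from_el0)   /* hypothetical wrapper */
    {
        rt_interrupt_enter();                    /* trace IRQ nesting level */

    #ifdef RT_USING_SMART
        if (trap_from_el0)                       /* SAVE_USER_CTX checks SPSR_EL1 */
            lwp_uthread_ctx_save(eframe);
    #endif

        rt_hw_trap_irq();                        /* dispatch to the platform IRQ handler */

    #ifdef RT_USING_SMART
        if (trap_from_el0)                       /* RESTORE_USER_CTX, symmetric to the save */
            lwp_uthread_ctx_restore(eframe);
    #endif

        rt_interrupt_leave();                    /* drop IRQ nesting level */

        rt_hw_vector_irq_sched(eframe);          /* SMP or UP scheduling hook */

        /* rt_hw_irq_exit then runs RESTORE_IRQ_CONTEXT to resume the interrupted code */
    }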