.section .text.tlb_invalidate_all, "ax", %progbits
.type tlb_invalidate_all, %function
.global tlb_invalidate_all
tlb_invalidate_all:
    dsb sy
    tlbi alle3
    dsb sy
    isb
    ret
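/*
 * Note on the barrier discipline shared by the TLB maintenance routines in
 * this file: the leading dsb ensures any prior translation table updates are
 * visible before the tlbi is issued, the trailing dsb waits for the
 * invalidation itself to complete, and the final isb synchronizes the
 * instruction stream so no stale translations remain in flight. The
 * _inner_shareable variants use ish-domain barriers and broadcast tlbi ops
 * (alle3is/vale3is), which also affect the other cores in the Inner
 * Shareable domain.
 */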
.section .text.tlb_invalidate_all_inner_shareable, "ax", %progbits
.type tlb_invalidate_all_inner_shareable, %function
.global tlb_invalidate_all_inner_shareable
tlb_invalidate_all_inner_shareable:
    dsb ish
    tlbi alle3is
    dsb ish
    isb
    ret
.section .text.tlb_invalidate_page, "ax", %progbits
.type tlb_invalidate_page, %function
.global tlb_invalidate_page
tlb_invalidate_page:
    lsr x8, x0, #12
    dsb sy
    tlbi vale3, x8
    dsb sy
    isb
    ret
.section .text.tlb_invalidate_page_inner_shareable, "ax", %progbits
.type tlb_invalidate_page_inner_shareable, %function
.global tlb_invalidate_page_inner_shareable
tlb_invalidate_page_inner_shareable:
    lsr x8, x0, #12
    dsb ish
    tlbi vale3is, x8
    dsb ish
    isb
    ret
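/*
 * Presumed C prototypes for the routines above (the names come from the
 * code; the argument types are an assumption based on how x0 is used):
 *
 *   void tlb_invalidate_all(void);
 *   void tlb_invalidate_all_inner_shareable(void);
 *   void tlb_invalidate_page(uintptr_t vaddr);
 *   void tlb_invalidate_page_inner_shareable(uintptr_t vaddr);
 *
 * The page variants shift the address right by 12 because tlbi vale3 takes
 * VA[55:12] in the low bits of its operand; vale3 drops only the final-level
 * (leaf) entry for that address, not the entries for every level of the walk.
 */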
/* The following functions are taken/adapted from https://github.com/u-boot/u-boot/blob/master/arch/arm/cpu/armv8/cache.S */
/*
 * (C) Copyright 2013
 * David Feng <fenghua@phytium.com.cn>
 *
 * This file is based on sample code from ARMv8 ARM.
 *
 * SPDX-License-Identifier: GPL-2.0+
 */
/*
 * void __asm_dcache_level(level)
 *
 * flush or invalidate one level of cache.
 *
 * x0: cache level
 * x1: 0 clean & invalidate, 1 invalidate only
 * x2~x9: clobbered
 */
.section .text.__asm_dcache_level, "ax", %progbits
.type __asm_dcache_level, %function
__asm_dcache_level:
    lsl x12, x0, #1
    msr csselr_el1, x12     /* select cache level */
    isb                     /* sync change of cssidr_el1 */
    mrs x6, ccsidr_el1      /* read the new cssidr_el1 */
    and x2, x6, #7          /* x2 <- log2(cache line size)-4 */
    add x2, x2, #4          /* x2 <- log2(cache line size) */
    mov x3, #0x3ff
    and x3, x3, x6, lsr #3  /* x3 <- max number of #ways */
    clz w5, w3              /* bit position of #ways */
    mov x4, #0x7fff
    and x4, x4, x6, lsr #13 /* x4 <- max number of #sets */
    /* x12 <- cache level << 1 */
    /* x2  <- line length offset */
    /* x3  <- number of cache ways - 1 */
    /* x4  <- number of cache sets - 1 */
    /* x5  <- bit position of #ways */
loop_set:
    mov x6, x3              /* x6 <- working copy of #ways */
loop_way:
    lsl x7, x6, x5
    orr x9, x12, x7         /* map way and level to cisw value */
    lsl x7, x4, x2
    orr x9, x9, x7          /* map set number to cisw value */
    tbz w1, #0, 1f
    dc  isw, x9
    b   2f
1:  dc  cisw, x9            /* clean & invalidate by set/way */
2:  subs x6, x6, #1         /* decrement the way */
    b.ge loop_way
    subs x4, x4, #1         /* decrement the set */
    b.ge loop_set
    ret
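/*
 * Worked example of the set/way encoding built above (illustrative numbers,
 * not a claim about the target hardware): for a 32 KB, 2-way cache with
 * 64-byte lines, ccsidr_el1 yields LineSize = 2 (so x2 = 6, i.e. log2(64)),
 * Associativity = 1 (so x3 = 1 and clz gives x5 = 31), and NumSets = 255
 * (x4 = 255). Each dc cisw/isw operand is then
 * (way << 31) | (set << 6) | (level << 1), matching the DC CISW/ISW
 * set/way/level field layout in the ARMv8 ARM.
 */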
/*
 * void __asm_dcache_all(int invalidate_only)
 *
 * x0: 0 clean & invalidate, 1 invalidate only
 *
 * flush or invalidate all data cache by SET/WAY.
 */
.section .text.__asm_dcache_all, "ax", %progbits
.type __asm_dcache_all, %function
__asm_dcache_all:
    mov x1, x0
    dsb sy
    mrs x10, clidr_el1      /* read clidr_el1 */
    lsr x11, x10, #24
    and x11, x11, #0x7      /* x11 <- loc */
    cbz x11, finished       /* if loc is 0, exit */
    mov x15, lr
    mov x0, #0              /* start flush at cache level 0 */
    /* x0  <- cache level */
    /* x10 <- clidr_el1 */
    /* x11 <- loc */
    /* x15 <- return address */
loop_level:
    lsl x12, x0, #1
    add x12, x12, x0        /* x12 <- tripled cache level */
    lsr x12, x10, x12
    and x12, x12, #7        /* x12 <- cache type */
    cmp x12, #2
    b.lt skip               /* skip if no cache or icache */
    bl  __asm_dcache_level  /* x1 = 0 flush, 1 invalidate */
skip:
    add x0, x0, #1          /* increment cache level */
    cmp x11, x0
    b.gt loop_level
    mov x0, #0
    msr csselr_el1, x0      /* restore csselr_el1 */
    dsb sy
    isb
    mov lr, x15
finished:
    ret
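/*
 * The loop_level loop above reads the 3-bit cache type field for each level
 * out of clidr_el1; values of 2 or more indicate a data or unified cache is
 * present at that level, which is why levels with a type below 2 (no cache,
 * or instruction cache only) are skipped.
 */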
.section .text.flush_dcache_all, "ax", %progbits
.type flush_dcache_all, %function
.global flush_dcache_all
flush_dcache_all:
    mov x0, #0
    b   __asm_dcache_all
.section .text.invalidate_dcache_all, "ax", %progbits
.type invalidate_dcache_all, %function
.global invalidate_dcache_all
invalidate_dcache_all:
    mov x0, #1
    b   __asm_dcache_all
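/*
 * Presumed C prototypes for the two entry points above:
 *
 *   void flush_dcache_all(void);        (clean & invalidate by set/way)
 *   void invalidate_dcache_all(void);   (invalidate only)
 *
 * Caution (general ARMv8 behavior, not specific to this file): the
 * invalidate-only path discards dirty lines, so it is only safe when the
 * contents of memory are meant to win over whatever the caches hold, e.g.
 * early after reset or before the caches are enabled.
 */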
/*
 * void __asm_flush_dcache_range(start, end) (renamed -> flush_dcache_range)
 *
 * clean & invalidate data cache in the range
 *
 * x0: start address
 * x1: end address
 */
.section .text.flush_dcache_range, "ax", %progbits
.type flush_dcache_range, %function
.global flush_dcache_range
flush_dcache_range:
    mrs x3, ctr_el0
    lsr x3, x3, #16
    and x3, x3, #0xf
    mov x2, #4
    lsl x2, x2, x3          /* cache line size */
    /* x2 <- minimal cache line size in cache system */
    sub x3, x2, #1
    bic x0, x0, x3
1:  dc  civac, x0           /* clean & invalidate data or unified cache */
    add x0, x0, x2
    cmp x0, x1
    b.lo 1b
    dsb sy
    ret
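/*
 * The line-size computation above decodes ctr_el0.DminLine (bits [19:16]),
 * which holds log2 of the smallest data cache line in words, so 4 << DminLine
 * yields the size in bytes. Example: DminLine = 4 gives 4 << 4 = 64-byte
 * lines, the common case on ARMv8 cores.
 */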
/*
 * void __asm_invalidate_dcache_range(start, end) (-> invalidate_dcache_range)
 *
 * invalidate data cache in the range
 *
 * x0: start address
 * x1: end address
 */
.section .text.invalidate_dcache_range, "ax", %progbits
.type invalidate_dcache_range, %function
.global invalidate_dcache_range
invalidate_dcache_range:
    mrs x3, ctr_el0
    ubfm x3, x3, #16, #19
    mov x2, #4
    lsl x2, x2, x3          /* cache line size */
    /* x2 <- minimal cache line size in cache system */
    sub x3, x2, #1
    bic x0, x0, x3
1:  dc  ivac, x0            /* invalidate data or unified cache */
    add x0, x0, x2
    cmp x0, x1
    b.lo 1b
    dsb sy
    ret
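/*
 * The ubfm above is the same DminLine extraction as the lsr/and pair in
 * flush_dcache_range (ubfm x3, x3, #16, #19 is the ubfx alias for bits
 * [19:16]). Note that only the start address is aligned down: if start or
 * end is not line-aligned, the edge lines are invalidated whole, which can
 * discard unrelated dirty data sharing those lines, so callers presumably
 * must pass line-aligned ranges.
 */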
/*
 * void __asm_invalidate_icache_all(void) (-> invalidate_icache_inner_shareable)
 *
 * invalidate all icache entries.
 */
.section .text.invalidate_icache_inner_shareable, "ax", %progbits
.type invalidate_icache_inner_shareable, %function
.global invalidate_icache_inner_shareable
invalidate_icache_inner_shareable:
    dsb ish
    isb
    ic  ialluis
    dsb ish
    isb
    ret
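/*
 * ic ialluis invalidates all instruction caches to the Point of Unification
 * across the Inner Shareable domain. The dsb/isb before it presumably order
 * any preceding data-cache cleans ahead of the invalidate; the dsb/isb after
 * it ensure the invalidation completes and the fetch stream resynchronizes
 * before any further instructions execute.
 */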
/* Final steps before power down. */
.section .text.finalize_powerdown, "ax", %progbits
.type finalize_powerdown, %function
.global finalize_powerdown
finalize_powerdown:
    /* Make all data accesses to Normal memory from EL0/EL1, and all Normal memory accesses to the EL0/1 stage 1 translation tables, non-cacheable for all levels of unified cache (clear SCTLR_EL1.C). */
    mrs x0, sctlr_el1
    and x0, x0, #0xfffffffffffffffb
    msr sctlr_el1, x0
    isb
    /* Same as above, for EL3. */
    mrs x0, sctlr_el3
    and x0, x0, #0xfffffffffffffffb
    msr sctlr_el3, x0
    isb
    /* Disable table walk descriptor access prefetch, disable instruction prefetch, disable data prefetch. */
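    /* s3_1_c15_c2_1 is an IMPLEMENTATION DEFINED register; on the Cortex-A57 (the likely target, given the EL3 TLB ops above) this encoding is CPUACTLR_EL1. The specific bit assignments below are taken on trust from the original code. */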
    mrs x0, s3_1_c15_c2_1
    orr x0, x0, #0x4000000000
    and x0, x0, #0xffffffe7ffffffff
    and x0, x0, #0xfffffffcffffffff
    msr s3_1_c15_c2_1, x0
    isb
    dsb sy
    bl  flush_dcache_all
    /* Disable receiving instruction cache/TLB maintenance operations. */
    mrs x0, s3_1_c15_c2_1
    and x0, x0, #0xffffffffffffffbf
    msr s3_1_c15_c2_1, x0
    /* Prepare GICC. */
    bl  intr_prepare_gicc_for_sleep
    /* Set OS double lock. */
    mrs x0, osdlr_el1
    orr x0, x0, #1
    msr osdlr_el1, x0
    isb
    dsb sy
wait_for_power_off:
    wfi
    b   wait_for_power_off
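/*
 * With the caches flushed and disabled, maintenance broadcasts ignored, and
 * the OS double lock set, the core spins in wfi until it is actually power
 * gated; the loop guards against spurious wfi wake-ups.
 */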
/* Call a function with desired stack pointer. */
.section .text.call_with_stack_pointer, "ax", %progbits
.type call_with_stack_pointer, %function
.global call_with_stack_pointer
call_with_stack_pointer:
    mov sp, x0
    br  x1
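/*
 * Presumed prototype: void call_with_stack_pointer(uintptr_t sp, void (*f)(void)).
 * Because this uses br rather than blr, lr is left untouched: if the target
 * function returns, it returns directly to this function's caller, but
 * running on the new stack.
 */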