libpayload arm64: Add library helpers
Add library helpers to access standard arm64 registers. The library
also provides functions to directly read/write registers based on the
current exception level (EL), so the rest of the code does not need to
check the EL and call the appropriate EL-specific function itself.

BUG=chrome-os-partner:31634
BRANCH=None
TEST=Libpayload and depthcharge compile successfully for ryu

Change-Id: Ibc0ca49f158362d4b7ab2045bf0fbd58ada79360
Signed-off-by: Patrick Georgi <pgeorgi@chromium.org>
Original-Commit-Id: 2ca6da580cb51b4c23abdaf04fee2785e5780510
Original-Change-Id: I9b63e04aa26a98bbeb34fdef634776d49454ca8d
Original-Signed-off-by: Furquan Shaikh <furquan@google.com>
Original-Reviewed-on: https://chromium-review.googlesource.com/214575
Original-Reviewed-by: Aaron Durbin <adurbin@chromium.org>
Original-Tested-by: Furquan Shaikh <furquan@chromium.org>
Original-Commit-Queue: Furquan Shaikh <furquan@chromium.org>
Reviewed-on: http://review.coreboot.org/8784
Tested-by: build bot (Jenkins)
Reviewed-by: Stefan Reinauer <stefan.reinauer@coreboot.org>
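The EL-dispatching helpers described above are not shown in the hunk below. As a rough sketch of the pattern the commit message describes (function names and exact shape are illustrative assumptions, not the actual libpayload API):

#include <stdint.h>

/* Illustrative sketch only: CurrentEL[3:2] holds the exception level. */
static inline uint32_t current_el_sketch(void)
{
	uint64_t el;
	asm volatile ("mrs %0, CurrentEL" : "=r" (el));
	return (el >> 2) & 0x3;
}

/* Read SCTLR at whatever EL we happen to be running in, so callers
 * do not have to pick the _el1/_el2/_el3 variant themselves. */
static inline uint64_t raw_read_sctlr_current_sketch(void)
{
	uint64_t val = 0;

	switch (current_el_sketch()) {
	case 1:
		asm volatile ("mrs %0, sctlr_el1" : "=r" (val));
		break;
	case 2:
		asm volatile ("mrs %0, sctlr_el2" : "=r" (val));
		break;
	case 3:
		asm volatile ("mrs %0, sctlr_el3" : "=r" (val));
		break;
	}
	return val;
}

The library presumably provides similar "current EL" accessors for the other system registers touched by the code below; the sketch only shows the dispatch idea.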
Committed by: Patrick Georgi
Parent: 3b1ee0387c
Commit: 635b45d608
@@ -67,132 +67,6 @@
#define SCTLR_TE (1 << 30) /* Thumb exception enable */
/* Bit 31 is reserved */

/*
 * Sync primitives
 */
/* data memory barrier */
#define dmb_opt(opt) asm volatile ("dmb " #opt : : : "memory")
/* data sync barrier */
#define dsb_opt(opt) asm volatile ("dsb " #opt : : : "memory")
/* instruction sync barrier */
#define isb_opt(opt) asm volatile ("isb " #opt : : : "memory")

#define dmb() dmb_opt(sy)
#define dsb() dsb_opt(sy)
#define isb() isb_opt()

/*
 * Low-level TLB maintenance operations
 */

/* invalidate entire unified TLB */
static inline void tlbiall_el3(void)
{
	asm volatile ("tlbi alle3" : : : "memory");
}

/* invalidate unified TLB by VA, all ASID */
static inline void tlbivaa(unsigned long va)
{
	asm volatile ("tlbi vaae1, %0" : : "r" (va) : "memory");
}

/*
 * Low-level cache maintenance operations
 */

/* data cache clean and invalidate by VA to PoC */
static inline void dccivac(unsigned long va)
{
	asm volatile ("dc civac, %0" : : "r" (va) : "memory");
}

/* data cache invalidate by set/way */
static inline void dccisw(uint32_t val)
{
	asm volatile ("dc cisw, %0" : : "r" (val) : "memory");
}

/* data cache clean by VA to PoC */
static inline void dccvac(unsigned long va)
{
	asm volatile ("dc cvac, %0" : : "r" (va) : "memory");
}

/* data cache clean by set/way */
static inline void dccsw(uint32_t val)
{
	asm volatile ("dc csw, %0" : : "r" (val) : "memory");
}

/* data cache invalidate by VA to PoC */
static inline void dcivac(unsigned long va)
{
	asm volatile ("dc ivac, %0" : : "r" (va) : "memory");
}

/* data cache invalidate by set/way */
static inline void dcisw(uint32_t val)
{
	asm volatile ("dc isw, %0" : : "r" (val) : "memory");
}

/* instruction cache invalidate all by PoU */
static inline void iciallu(void)
{
	asm volatile ("ic iallu" : : "r" (0));
}

/* read cache level ID register (CLIDR) */
static inline uint32_t read_clidr(void)
{
	uint32_t val = 0;
	asm volatile ("mrs %0, clidr_el1" : "=r" (val));
	return val;
}

/* read cache size ID register (CCSIDR) */
static inline uint32_t read_ccsidr(void)
{
	uint32_t val = 0;
	asm volatile ("mrs %0, ccsidr_el1" : "=r" (val));
	return val;
}

/* read cache size selection register (CSSELR) */
static inline uint32_t read_csselr(void)
{
	uint32_t val = 0;
	asm volatile ("mrs %0, csselr_el1" : "=r" (val));
	return val;
}

/* write to cache size selection register (CSSELR) */
static inline void write_csselr(uint32_t val)
{
	/*
	 * Bits [3:1] - Cache level + 1 (0b000 = L1, 0b110 = L7, 0b111 is rsvd)
	 * Bit 0 - 0 = data or unified cache, 1 = instruction cache
	 */
	asm volatile ("msr csselr_el1, %0" : : "r" (val));
	isb(); /* ISB to sync the change to CCSIDR */
}

/* read system control register (SCTLR) */
static inline uint32_t read_sctlr_el3(void)
{
	uint32_t val;
	asm volatile ("mrs %0, sctlr_el3" : "=r" (val));
	return val;
}

/* write system control register (SCTLR) */
static inline void write_sctlr_el3(uint32_t val)
{
	asm volatile ("msr sctlr_el3, %0" : : "r" (val) : "cc");
	isb();
}

/*
 * Cache maintenance API
 */
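For context, helpers like the ones removed above are typically composed into range operations along these lines. This is a generic sketch under stated assumptions, not code from this change; it assumes the helpers shown in the hunk plus the usual <stdint.h>/<stddef.h> types, and the name dcache_clean_range_sketch is hypothetical:

/* Generic sketch: clean a buffer to the point of coherency using the
 * helpers above. The line size comes from CCSIDR after selecting the
 * L1 data cache via CSSELR (write_csselr() already issues the ISB). */
static inline void dcache_clean_range_sketch(void *start, size_t len)
{
	uintptr_t addr = (uintptr_t)start;
	uintptr_t end = addr + len;
	unsigned int line;

	write_csselr(0 << 1);				/* level 1, data/unified cache */
	line = 1 << ((read_ccsidr() & 0x7) + 4);	/* line size in bytes */

	for (addr &= ~((uintptr_t)line - 1); addr < end; addr += line)
		dccvac(addr);
	dsb();	/* make the clean visible before returning */
}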