/*
 * PhotonRTOS础光实时操作系统 -- 处理器相关
 *
 * Copyright (C) 2022, 2023 国科础石(重庆)软件有限公司
 *
 * 作者: Baoyou Xie <xiebaoyou@kernelsoft.com>
 *
 * License terms: GNU General Public License (GPL) version 3
 *
 */

#ifndef __ASM_PROCESSOR_H
#define __ASM_PROCESSOR_H

#ifndef __ASSEMBLY__

#include <asm/types.h>


/*
 * __ALT_SMP_ASM(smp, up) - SMP-alternative instruction sequence.
 *
 * On CONFIG_SMP builds, emit the "smp" instruction and record its address
 * in the ".alt.smp.init" section so the kernel can patch it to the "up"
 * instruction if it later finds itself running on a uniprocessor system.
 * On !CONFIG_SMP builds the "up" form is used directly with no patch
 * record.
 *
 * Fix: the address record used ".intptr_t", which is not a GNU assembler
 * directive and would fail to assemble; ".long" emits the 4-byte address
 * expected on 32-bit ARM (matching Linux's __ALT_SMP_ASM).
 */
#ifdef CONFIG_SMP
#define __ALT_SMP_ASM(smp, up)						\
	"9998:	" smp "\n"						\
	"	.pushsection \".alt.smp.init\", \"a\"\n"		\
	"	.long	9998b\n"					\
	"	" up "\n"						\
	"	.popsection\n"
#else
#define __ALT_SMP_ASM(smp, up)	up
#endif

#ifdef CONFIG_CPU_CORTEX_M
/*
 * Thumb-2 (Cortex-M) wide-encoding helpers:
 *  W(instr)    - assembly-source form, forces the 32-bit ".w" encoding.
 *  WASM(instr) - stringified form for use inside C inline asm.
 *
 * NOTE(review): this region sits inside the "#ifndef __ASSEMBLY__" guard
 * opened at the top of this header, so the "#ifdef __ASSEMBLY__" arm
 * below is unreachable and W() is never actually defined for assembly
 * files -- these definitions likely belong outside the !__ASSEMBLY__
 * block; confirm against the assembly users of W().
 */
#ifdef __ASSEMBLY__
#define W(instr)	instr.w
#else
#define WASM(instr)	#instr ".w"
#endif

/*
 * WFE_COND(cond) - conditionally wait-for-event on SMP builds; patched
 * to a wide nop on uniprocessor systems via __ALT_SMP_ASM.  The
 * replacement "nop.w" presumably matches the combined size of the
 * narrow IT + WFE.n pair -- confirm encodings before changing widths.
 */
#define WFE_COND(cond)	__ALT_SMP_ASM(		\
	"it " cond "\n\t"			\
	"wfe" cond ".n",			\
						\
	"nop.w"					\
)
#endif

#ifdef CONFIG_CPU_CORTEX_R
/*
 * Cortex-R (ARM state) needs no ".w" width suffix:
 *  W(instr)    - assembly-source form (instruction unchanged).
 *  WASM(instr) - stringified form for use inside C inline asm.
 *
 * NOTE(review): this region is inside the "#ifndef __ASSEMBLY__" guard
 * opened earlier in this header, so the "#ifdef __ASSEMBLY__" arm below
 * is unreachable and W() is never defined for assembly files; confirm
 * whether these belong outside the !__ASSEMBLY__ block.
 */
#ifdef __ASSEMBLY__
#define W(instr)	instr
#else
#define WASM(instr)	#instr
#endif

#endif


/* Send-event instruction that is patched to a nop on uniprocessor systems. */
#define SEV __ALT_SMP_ASM(WASM(sev), WASM(nop))

/*
 * dsb_sev() - publish prior stores, then wake cores sleeping in WFE.
 *
 * The DSB (inner-shareable, store variant) ensures preceding stores
 * (e.g. a lock-release write) are visible before the SEV fires, so a
 * woken waiter observes the updated memory.
 */
static inline void dsb_sev(void)
{

	dsb(ishst);
	asm(SEV);
}

/*
 * cpu_context - callee-saved register state preserved across a context
 * switch (32-bit ARM: r4-r9, sl/r10, fp/r11, plus sp and pc).
 *
 * NOTE(review): the field order presumably must match the offsets used
 * by the context-switch assembly -- do not reorder fields; confirm
 * against the switch code before changing.
 */
struct cpu_context {
	uintptr_t	r4;
	uintptr_t	r5;
	uintptr_t	r6;
	uintptr_t	r7;
	uintptr_t	r8;
	uintptr_t	r9;
	uintptr_t	sl;
	uintptr_t	fp;
	uintptr_t	sp;
	uintptr_t	pc;
	uintptr_t	extra[2];		/* Xscale 'acc' register, etc */

};

/* Per-task architecture-specific state; holds the saved register context. */
struct task_spot {
	struct cpu_context cpu_context;
};

/*
 * cpu_relax() - hint that the caller is busy-waiting.  YIELD lets the
 * core deprioritize this hardware thread; the "memory" clobber forces
 * the compiler to re-read the spun-on location on every iteration.
 */
static inline void cpu_relax(void)
{
	asm volatile("yield" ::: "memory");
}

/*
 * prefetch(x) attempts to pre-emptively get the memory pointed to
 * by address "x" into the CPU L1 cache.
 * prefetch(x) should not cause any kind of exception; prefetch(0) is
 * specifically ok.
 * prefetchw(x) prefetches with intent to write (second __builtin_prefetch
 * argument 1 = write access).
 * spin_lock_prefetch(x) warms the lock word before it is taken.
 * PREFETCH_STRIDE is the architecture-preferred "lookahead" size for
 * prefetching streamed operations.
 * Each falls back to a generic definition unless the architecture
 * provides its own (ARCH_HAS_* / PREFETCH_STRIDE already defined).
 */
#ifndef ARCH_HAS_PREFETCH
#define prefetch(x) __builtin_prefetch(x)
#endif

#ifndef ARCH_HAS_PREFETCHW
#define prefetchw(x) __builtin_prefetch(x, 1)
#endif

#ifndef ARCH_HAS_SPINLOCK_PREFETCH
#define spin_lock_prefetch(x) prefetchw(x)
#endif

#ifndef PREFETCH_STRIDE
#define PREFETCH_STRIDE (4*L1_CACHE_BYTES)
#endif

/*
 * prefetch_range() - prefetch every PREFETCH_STRIDE-sized step of the
 * byte range [addr, addr + len) into the cache.
 *
 * Compiles to nothing unless the architecture supplies a real prefetch
 * implementation (ARCH_HAS_PREFETCH).
 *
 * Fix: the original computed "addr + len" directly on a void * operand;
 * void * arithmetic is a GNU extension, not standard C.  Convert to a
 * byte pointer before the arithmetic.
 */
static inline void prefetch_range(void *addr, size_t len)
{
#ifdef ARCH_HAS_PREFETCH
	int8_t *cp = addr;
	int8_t *end = (int8_t *)addr + len;	/* byte arithmetic, no void * extension */

	for (; cp < end; cp += PREFETCH_STRIDE)
		prefetch(cp);
#endif
}

/* This architecture provides its own cpu_do_idle() implementation. */
#define ARCH_FUN_cpu_do_idle_DEFINED
extern void cpu_do_idle(void);	/* enter low-power idle; implemented in arch code */
void init_arch_cpu(void);	/* early architecture-specific CPU setup */

#endif /* __ASSEMBLY__ */

#endif /* __ASM_PROCESSOR_H */
