mips:compel/arch/mips: Add architecture support to compel tool and libraries

This patch only adds the support; it does not yet enable it in the build.

Signed-off-by: Guoyun Sun <sunguoyun@loongson.cn>
Guoyun Sun 2020-04-08 10:13:22 +08:00 committed by Andrei Vagin
parent 8be1d457d7
commit ba0d6dbac1
32 changed files with 1627 additions and 0 deletions


@@ -0,0 +1,148 @@
#ifndef __CR_ATOMIC_H__
#define __CR_ATOMIC_H__
#include <linux/types.h>
#include "common/compiler.h"
#include "common/arch/mips/asm/utils.h"
#include "common/arch/mips/asm/cmpxchg.h"
/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v) (*(volatile int *)&(v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i) ((v)->counter = (i))
/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t *v)
{
	int temp;

	/* LL/SC retry loop: sc leaves 0 in %0 if the reservation was lost. */
	do {
		__asm__ __volatile__(
		"	.set	mips3				\n"
		"	ll	%0, %1		# atomic_add	\n"
		"	addu	%0, %2				\n"
		"	sc	%0, %1				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} while (unlikely(!temp));
}
/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t *v)
{
	int temp;

	do {
		__asm__ __volatile__(
		"	.set	mips3				\n"
		"	ll	%0, %1		# atomic_sub	\n"
		"	subu	%0, %2				\n"
		"	sc	%0, %1				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} while (unlikely(!temp));
}
/*
 * Same as above, but return the resulting value
 */
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	int result;
	int temp;

	smp_mb__before_llsc();
	do {
		__asm__ __volatile__(
		"	.set	mips3					\n"
		"	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} while (unlikely(!result));
	/* sc overwrote %0 with its success flag; recompute the new value. */
	result = temp + i;
	smp_llsc_mb();
	return result;
}
static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	int result;
	int temp;

	smp_mb__before_llsc();
	do {
		__asm__ __volatile__(
		"	.set	mips3					\n"
		"	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} while (unlikely(!result));
	/* sc overwrote %0 with its success flag; recompute the new value. */
	result = temp - i;
	smp_llsc_mb();
	return result;
}
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))
/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))
#endif /* __CR_ATOMIC_H__ */
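For context, a minimal sketch of how this API is consumed (illustrative only, not part of the patch; the reference-count scenario and names are hypothetical):

/* Hypothetical reference count guarding a shared resource. */
static atomic_t refcnt;

static void get_ref(void)
{
	atomic_inc(&refcnt);	/* unordered LL/SC retry loop */
}

static int put_ref(void)
{
	/*
	 * atomic_dec_return() is fully ordered: smp_mb__before_llsc()
	 * and smp_llsc_mb() bracket the LL/SC sequence above.
	 */
	return atomic_dec_return(&refcnt) == 0;
}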


@@ -0,0 +1,41 @@
#ifndef _LINUX_BITOPS_H
#define _LINUX_BITOPS_H
#include <asm/types.h>
#include "common/compiler.h"
#include "common/asm-generic/bitops.h"
/**
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered by the compiler
 * (via the "memory" clobber). Note: unlike the kernel version, it
 * issues no explicit SMP memory barrier around the LL/SC sequence.
 */
static inline int test_and_set_bit(unsigned long nr,
				   volatile unsigned long *addr)
{
	/* BITS_PER_LONG == 64 here, hence the >> 6 / & 63 indexing. */
	unsigned long *m = ((unsigned long *) addr) + (nr >> 6);
	unsigned long temp = 0;
	unsigned long res;
	int bit = nr & 63UL;

	do {
		__asm__ __volatile__(
		"	.set	mips3					\n"
		"	lld	%0, %1		# test_and_set_bit	\n"
		"	or	%2, %0, %3				\n"
		"	scd	%2, %1					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+m" (*m), "=&r" (res)
		: "r" (1UL << bit)
		: "memory");
	} while (unlikely(!res));

	res = temp & (1UL << bit);
	return res != 0;
}
#endif
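A sketch of typical use, with a hypothetical bitmap (try_claim_slot is not from the patch):

static unsigned long busy_map[4];	/* 256 slots, 64 bits per long */

static int try_claim_slot(unsigned long slot)
{
	/* test_and_set_bit() returns the old bit: 0 means the claim won. */
	return test_and_set_bit(slot, busy_map) == 0;
}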


@@ -0,0 +1,6 @@
#ifndef __CR_BITSPERLONG_H__
#define __CR_BITSPERLONG_H__
# define BITS_PER_LONG 64
#endif /* __CR_BITSPERLONG_H__ */


@@ -0,0 +1,67 @@
#ifndef __CR_CMPXCHG_H__
#define __CR_CMPXCHG_H__

#include "common/arch/mips/asm/utils.h" /* kernel_uses_llsc, smp_*_mb() */
#define __cmpxchg_asm(ld, st, m, old, new)				\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	mips3				\n"	\
		"1:	" ld "	%0, %2	# __cmpxchg_asm		\n"	\
		"	bne	%0, %z3, 2f			\n"	\
		"	.set	mips0				\n"	\
		"	move	$1, %z4				\n"	\
		"	.set	mips3				\n"	\
		"	" st "	$1, %1				\n"	\
		"	beqz	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		"2:						\n"	\
		: "=&r" (__ret), "=R" (*m)				\
		: "R" (*m), "Jr" (old), "Jr" (new)			\
		: "memory");						\
	} else {							\
		/* kernel_uses_llsc is hard-wired to 1 in utils.h. */	\
	}								\
									\
	__ret;								\
})
/*
 * This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid cmpxchg().
 */
extern void __cmpxchg_called_with_bad_pointer(void);
#define __cmpxchg(ptr, old, new, pre_barrier, post_barrier)		\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	__typeof__(*(ptr)) __res = 0;					\
									\
	pre_barrier;							\
									\
	switch (sizeof(*(__ptr))) {					\
	case 4:								\
		__res = __cmpxchg_asm("ll", "sc", __ptr, __old, __new);	\
		break;							\
	case 8:								\
		if (sizeof(long) == 8) {				\
			__res = __cmpxchg_asm("lld", "scd", __ptr,	\
					      __old, __new);		\
			break;						\
		}							\
		/* 32-bit long: fall through to the link error. */	\
	default:							\
		__cmpxchg_called_with_bad_pointer();			\
		break;							\
	}								\
									\
	post_barrier;							\
									\
	__res;								\
})
#define cmpxchg(ptr, old, new) __cmpxchg(ptr, old, new, smp_mb__before_llsc(), smp_llsc_mb())
#endif /* __CR_CMPXCHG_H__ */
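Illustrative only: the classic compare-and-swap retry loop built on cmpxchg(), here through the atomic_cmpxchg() wrapper from atomic.h; atomic_inc_below() is a hypothetical helper, not part of the patch:

/* Increment v, but never past max; returns the value seen. */
static inline int atomic_inc_below(atomic_t *v, int max)
{
	int old, new;

	do {
		old = atomic_read(v);
		if (old >= max)
			return old;
		new = old + 1;
		/* Retry if another CPU changed v between the read
		 * and the cmpxchg. */
	} while (atomic_cmpxchg(v, old, new) != old);

	return old;
}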


@@ -0,0 +1,38 @@
#ifndef _ASM_GENERIC_BITOPS_FLS64_H_
#define _ASM_GENERIC_BITOPS_FLS64_H_
#include <asm/types.h>
/**
 * fls64 - find last set bit in a 64-bit word
 * @x: the word to search
 *
 * This is defined in a similar way to the libc and compiler builtin
 * ffsll, but returns the position of the most significant set bit.
 *
 * fls64(value) returns 0 if value is 0, or the position of the last
 * set bit if value is nonzero. The last (most significant) bit is
 * at position 64.
 */
#include "common/arch/mips/asm/bitops.h"
#if BITS_PER_LONG == 32
static __always_inline int fls64(__u64 x)
{
	__u32 h = x >> 32;

	if (h)
		return fls(h) + 32;
	return fls(x);
}
#elif BITS_PER_LONG == 64
extern unsigned long __fls(unsigned long word);

static __always_inline int fls64(__u64 x)
{
	if (x == 0)
		return 0;
	return __fls(x) + 1;
}
#else
#error BITS_PER_LONG not 32 or 64
#endif
#endif /* _ASM_GENERIC_BITOPS_FLS64_H_ */
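For reference, sample values under the convention above, and an equivalent sketch on top of the GCC/Clang builtin (fls64_builtin is a hypothetical name, not part of the patch):

/* fls64(0) == 0, fls64(1) == 1, fls64(1ULL << 63) == 64 */
static __always_inline int fls64_builtin(__u64 x)
{
	/* __builtin_clzll() is undefined for 0, so special-case it. */
	return x ? 64 - __builtin_clzll(x) : 0;
}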


@@ -0,0 +1,58 @@
#ifndef __CR_LINKAGE_H__
#define __CR_LINKAGE_H__
#define zero $0 /* wired zero */
#define AT $1 /* assembler temp - uppercase because of ".set at" */
#define v0 $2
#define v1 $3
#define a0 $4
#define a1 $5
#define a2 $6
#define a3 $7
#define a4 $8
#define a5 $9
#define a6 $10
#define a7 $11
#define t0 $12
#define t1 $13
#define t2 $14
#define t3 $15
#define s0 $16 /* callee saved */
#define s1 $17
#define s2 $18
#define s3 $19
#define s4 $20
#define s5 $21
#define s6 $22
#define s7 $23
#define t8 $24 /* caller saved */
#define t9 $25
#define jp $25 /* PIC jump register */
#define k0 $26 /* kernel scratch */
#define k1 $27
#define gp $28 /* global pointer */
#define sp $29 /* stack pointer */
#define fp $30 /* frame pointer */
#define s8 $30 /* same as fp! */
#define ra $31 /* return address */
#define __ALIGN .align 8
#define __ALIGN_STR ".align 8"
#define GLOBAL(name)		\
	.globl name;		\
name:

#define ENTRY(name)		\
	.globl name;		\
	__ALIGN;		\
	.type name, @function;	\
name:

#define END(sym)		\
	.size sym, . - sym
#endif /* __CR_LINKAGE_H__ */
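For illustration, how these macros are typically consumed in a .S file (the leaf routine is hypothetical, not part of the patch):

ENTRY(clear_first_dword)	/* void clear_first_dword(void *p) */
	sd	zero, 0(a0)	/* store a zero doubleword at *p */
	jr	ra		/* return to caller */
	nop			/* branch delay slot */
END(clear_first_dword)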


@@ -0,0 +1,39 @@
#ifndef __CR_ASM_PAGE_H__
#define __CR_ASM_PAGE_H__
#define ARCH_HAS_LONG_PAGES
#ifndef CR_NOGLIBC
#include <string.h> /* ffsl() */
#include <unistd.h> /* _SC_PAGESIZE */
static unsigned __page_size;
static unsigned __page_shift;

static inline unsigned page_size(void)
{
	if (!__page_size)
		__page_size = sysconf(_SC_PAGESIZE);
	return __page_size;
}

static inline unsigned page_shift(void)
{
	if (!__page_shift)
		__page_shift = (ffsl(page_size()) - 1);
	return __page_shift;
}

#define PAGE_SIZE	page_size()
#define PAGE_SHIFT	page_shift()
/*
 * Cast before inverting: PAGE_SIZE is unsigned int, so ~(PAGE_SIZE - 1)
 * would zero-extend and strip the high bits of 64-bit addresses.
 */
#define PAGE_MASK	(~(unsigned long)(PAGE_SIZE - 1))
#define PAGE_PFN(addr)	((addr) / PAGE_SIZE)
#else /* CR_NOGLIBC */
extern unsigned page_size(void);
#define PAGE_SIZE page_size()
#endif /* CR_NOGLIBC */
#endif /* __CR_ASM_PAGE_H__ */
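Illustrative arithmetic with these helpers, assuming 16K pages (a common configuration on MIPS64/Loongson; the address is an arbitrary example):

/* With 16K pages: PAGE_SIZE == 0x4000, PAGE_SHIFT == 14. */
unsigned long addr  = 0x120006543UL;
unsigned long start = addr & PAGE_MASK;	/* 0x120004000 */
unsigned long pfn   = PAGE_PFN(addr);	/* 0x48001 */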


@@ -0,0 +1,24 @@
#ifndef __UTILS_H__
#define __UTILS_H__
# define kernel_uses_llsc 1
typedef struct {
	int counter;
} atomic_t;
/*
 * FIXME: detect with compel_cpu_has_feature() if LL/SC implicitly
 * provide a memory barrier.
 */
#define __WEAK_LLSC_MB " sync \n"
#define smp_llsc_mb() __asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
#define smp_mb__before_llsc() smp_llsc_mb()
#define smp_mb__before_atomic() smp_mb__before_llsc()
#define smp_mb__after_atomic() smp_llsc_mb()
#endif /* __UTILS_H__ */
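A sketch of how callers pair these barriers with an otherwise unordered atomic op such as atomic_inc() from atomic.h (publish() is hypothetical, not part of the patch):

static void publish(atomic_t *seq)
{
	smp_mb__before_atomic();	/* order earlier stores before the inc */
	atomic_inc(seq);		/* the inc itself has no barriers */
	smp_mb__after_atomic();		/* make the inc visible before later work */
}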