#pragma once
#include "stub_builtins.h"
/* #ifdef __lsp_clang__ */
/* #include "stub_builtins.h" */
/* #endif */
#define __wrap_intrinsic static __attribute__((always_inline)) inline
#define __wrap_pure_intrinsic static __attribute__((always_inline)) __attribute__((pure)) inline
/* Reinterpret the bits of a 64-bit integer as a double (no value conversion).
 * Union-based type punning is well-defined as a GCC/Clang extension, which
 * this file already relies on (__attribute__ wrappers, __builtin_* calls). */
__wrap_intrinsic double bitcastld(long x){
  /* The pun is only meaningful when both members are the same width;
   * make the LP64 assumption explicit instead of silently truncating. */
  _Static_assert(sizeof(long) == sizeof(double),
                 "bitcastld requires 64-bit long");
  union {
    long l;
    double d;
  } tmp;
  tmp.l = x;
  return tmp.d;
}

/* Reinterpret the bits of a double as a 64-bit integer (inverse of
 * bitcastld). Union punning; well-defined under the GCC/Clang extension. */
__wrap_intrinsic long bitcastdl(double x){
  /* Guard the width assumption the pun depends on. */
  _Static_assert(sizeof(long) == sizeof(double),
                 "bitcastdl requires 64-bit long");
  union {
    long l;
    double d;
  } tmp;
  tmp.d = x;
  return tmp.l;
}

/* DEF_BINOP(type, name, op): defines an inline elementwise wrapper
 * name_<type>(v0, v1) returning (v0 op v1) on vector type __<type>. */
#define DEF_BINOP(type, name, op)                                              \
  __wrap_pure_intrinsic __##type name ## _ ##type(__ ##type v0, __ ##type v1) { return (v0) op (v1); }
/* DEF_BIN_BUILTIN(type, name, sf): defines name_<type>(v0, v1) forwarding
 * both operands to the compiler builtin __builtin_<sf>. */
#define DEF_BIN_BUILTIN(type, name, sf)\
  __wrap_pure_intrinsic __##type name ## _ ##type(__ ##type v0, __ ##type v1) { return __builtin_ ## sf((v0), (v1)); }

/* No trailing ';' after the invocations below: each macro already expands to
 * a complete function definition, so a following ';' would be an empty
 * file-scope declaration (a constraint violation under strict ISO C,
 * warned about by -pedantic). */

/* Elementwise addition. */
DEF_BINOP( v8si, add, +)
DEF_BINOP( v4di, add, +)
DEF_BINOP( v4df, add, +)
DEF_BINOP( v4sf, add, +)
DEF_BINOP(uv8si, add, +)
DEF_BINOP(uv4di, add, +)

/* Elementwise subtraction. */
DEF_BINOP( v8si, sub, -)
DEF_BINOP( v4di, sub, -)
DEF_BINOP( v4df, sub, -)
DEF_BINOP( v4sf, sub, -)
DEF_BINOP(uv8si, sub, -)
DEF_BINOP(uv4di, sub, -)

/* Elementwise multiplication / division (floating-point types only). */
DEF_BINOP( v4df, mul, *)
DEF_BINOP( v4sf, mul, *)

DEF_BINOP( v4df, div, /)
DEF_BINOP( v4sf, div, /)

/* Floating-point comparisons via the SW compare builtins
 * (d = double lanes, s = single lanes). */
DEF_BIN_BUILTIN( v4df, cmplt, sw_vfcmpltd)
DEF_BIN_BUILTIN( v4sf, cmplt, sw_vfcmplts)

DEF_BIN_BUILTIN( v4df, cmple, sw_vfcmpled)
DEF_BIN_BUILTIN( v4sf, cmple, sw_vfcmples)

DEF_BIN_BUILTIN( v4df, cmpeq, sw_vfcmpeqd)
DEF_BIN_BUILTIN( v4sf, cmpeq, sw_vfcmpeqs)

/* Build a __v8si from eight scalar ints; lane i receives vi (v0 -> lane 0). */
__wrap_intrinsic __v8si set_v8si(int v0, int v1, int v2, int v3, int v4, int v5, int v6, int v7){
  /* Seed lane 0 via the scalar->vector cast builtin, then insert the
   * remaining lanes one at a time. Unrolled — presumably vinsw requires a
   * constant lane index; TODO confirm against the builtin's spec. */
  __v8si ret = __builtin_cast_si_v8si(v0);
  ret = __builtin_sw_vinsw(v1, ret, 1);
  ret = __builtin_sw_vinsw(v2, ret, 2);
  ret = __builtin_sw_vinsw(v3, ret, 3);
  ret = __builtin_sw_vinsw(v4, ret, 4);
  ret = __builtin_sw_vinsw(v5, ret, 5);
  ret = __builtin_sw_vinsw(v6, ret, 6);
  ret = __builtin_sw_vinsw(v7, ret, 7);
  return ret;
}

/* Build a __uv8si from eight unsigned lanes: construct the signed vector
 * and reinterpret it (the bit pattern is identical either way). */
__wrap_intrinsic __uv8si set_uv8si(unsigned int v0, unsigned int v1, unsigned int v2, unsigned int v3, unsigned int v4, unsigned int v5, unsigned int v6, unsigned int v7){
  __v8si as_signed = set_v8si(v0, v1, v2, v3, v4, v5, v6, v7);
  return __builtin_cast_v8si_uv8si(as_signed);
}

/* Build a __v4df from four doubles; lane i receives vi (v0 -> lane 0). */
__wrap_intrinsic __v4df set_v4df(double v0, double v1, double v2, double v3){
  /* Lane 0 via the scalar->vector cast, remaining lanes via vinsfd
   * (double-lane insert). Unrolled — presumably the lane index must be a
   * constant; TODO confirm. */
  __v4df ret = __builtin_cast_df_v4df(v0);
  ret = __builtin_sw_vinsfd(v1, ret, 1);
  ret = __builtin_sw_vinsfd(v2, ret, 2);
  ret = __builtin_sw_vinsfd(v3, ret, 3);
  return ret;
}
/* Build a __v4di from four longs. There is no 64-bit integer lane insert
 * used here, so each long is bit-punned to a double, packed as a __v4df,
 * and the result is reinterpreted as the integer vector. */
__wrap_intrinsic __v4di set_v4di(long v0, long v1, long v2, long v3){
  __v4df packed = set_v4df(bitcastld(v0), bitcastld(v1),
                           bitcastld(v2), bitcastld(v3));
  return __builtin_cast_v4df_v4di(packed);
}
/* Build a __uv4di from four unsigned longs: construct the signed vector
 * and reinterpret (same bits). */
__wrap_intrinsic __uv4di set_uv4di(unsigned long v0, unsigned long v1, unsigned long v2, unsigned long v3){
  __v4di as_signed = set_v4di(v0, v1, v2, v3);
  return __builtin_cast_v4di_uv4di(as_signed);
}
/* Build a __v4sf from four floats; lane i receives vi (v0 -> lane 0). */
__wrap_intrinsic __v4sf set_v4sf(float v0, float v1, float v2, float v3){
  /* Lane 0 via the scalar->vector cast, remaining lanes via vinsfs
   * (single-precision lane insert). Unrolled — presumably the lane index
   * must be a constant; TODO confirm. */
  __v4sf ret = __builtin_cast_sf_v4sf(v0);
  ret = __builtin_sw_vinsfs(v1, ret, 1);
  ret = __builtin_sw_vinsfs(v2, ret, 2);
  ret = __builtin_sw_vinsfs(v3, ret, 3);
  return ret;
}

/* Broadcast one int into all eight lanes of a __v8si.
 * On slave cores a single shuffle replicates lane 0. The original body had
 * no #else branch, so when __sw_slave__ was undefined the non-void function
 * fell off the end — undefined behavior; fall back to the generic
 * constructor instead so a value is always returned. */
__wrap_intrinsic __v8si set1_v8si(int v0){
  #ifdef __sw_slave__
  __v8si casted = __builtin_cast_si_v8si(v0);
  return __builtin_sw_slave_vshufflew(casted, casted, 0);
  #else
  return set_v8si(v0, v0, v0, v0, v0, v0, v0, v0);
  #endif
}

/* Broadcast one double into all four lanes of a __v4df.
 * On slave cores a single shuffle replicates lane 0. The original body had
 * no #else branch, so when __sw_slave__ was undefined the non-void function
 * fell off the end — undefined behavior; fall back to the generic
 * constructor instead. */
__wrap_intrinsic __v4df set1_v4df(double v0){
  #ifdef __sw_slave__
  __v4df casted = __builtin_cast_df_v4df(v0);
  return __builtin_sw_slave_vshuffled(casted, casted, 0);
  #else
  return set_v4df(v0, v0, v0, v0);
  #endif
}

/* Broadcast one float into all four lanes of a __v4sf.
 * On slave cores a single shuffle replicates lane 0. The original body had
 * no #else branch, so when __sw_slave__ was undefined the non-void function
 * fell off the end — undefined behavior; fall back to the generic
 * constructor instead. */
__wrap_intrinsic __v4sf set1_v4sf(float v0){
  #ifdef __sw_slave__
  __v4sf casted = __builtin_cast_sf_v4sf(v0);
  return __builtin_sw_slave_vshuffles(casted, casted, 0);
  #else
  return set_v4sf(v0, v0, v0, v0);
  #endif
}

/* Broadcast one long into all four lanes of a __v4di: pun the long to a
 * double, broadcast as a __v4df, then reinterpret as the integer vector. */
__wrap_intrinsic __v4di set1_v4di(long v0){
  __v4df splat = set1_v4df(bitcastld(v0));
  return __builtin_cast_v4df_v4di(splat);
}

/* Broadcast one unsigned long into all four lanes of a __uv4di.
 * Parameter changed from signed long to unsigned long for consistency with
 * set_uv4di and set1_uv8si; callers are unaffected (implicit conversion,
 * identical bit pattern). */
__wrap_intrinsic __uv4di set1_uv4di(unsigned long v0){
  return __builtin_cast_v4di_uv4di(set1_v4di(v0));
}

/* Broadcast one unsigned int into all eight lanes of a __uv8si: broadcast
 * as the signed vector, then reinterpret (same bits). */
__wrap_intrinsic __uv8si set1_uv8si(unsigned int v0){
  __v8si splat = set1_v8si(v0);
  return __builtin_cast_v8si_uv8si(splat);
}

/* Load a __uv8si via the sw_ldwe builtin from the address v0.
 * NOTE(review): the original body discarded the builtin's result and fell
 * off the end of a non-void function — undefined behavior. We now return
 * the loaded vector, reinterpreted through the cast builtin used elsewhere
 * in this file — assumes __builtin_sw_ldwe yields a __v8si; TODO confirm.
 * Also note the name says v8si while the declared return type is __uv8si;
 * verify which is intended before relying on signedness. */
__wrap_intrinsic __uv8si ldde_v8si(int *v0){
  return __builtin_cast_v8si_uv8si(__builtin_sw_ldwe(v0));
}
