// code: language=gas insertSpaces=false tabSize=8
	.set noreorder
	.set noat
	.set macro
	.arch sw3
#include "sys/regdef.h"
#define aux      v0
#define flip0011 t0
#define flip0101 t1
#define flip0110 t2
#define vswap    t3
#define vcmp     t4
#define av0      t5
#define av1      t6
#define av2      t7
#define av3      t8
#define av4      t9
#define av5      t10
#define av6      t11
#define av7      t12
#define av8      a1
#define av9      a2
#define ava      a3
#define avb      a4
#define avc      a5
#define avd      s0
#define ave      s1
#define avf      s2
# init_flips — one-time setup of the constant vectors used by every
# bitonic_* macro below.
# Outputs: flip0011 / flip0101 / flip0110 hold per-lane +/-1.0 sign
# patterns (only the sign bit matters downstream); aux (v0) ends holding
# the IEEE-754 sign-bit mask (1<<63) broadcast to all four lanes.
# Clobbers: aux (v0), t0-t2. Must run before any bitonic_* macro.
.macro	init_flips
	ldi	aux, 0x6c($31)
	wcsr	aux, 0x80		# write 0x6c to CSR 0x80 — NOTE(review): presumably FP/vector control setup; purpose not evident from this file, confirm
	#ldih	aux, 0xbff0($31)
	ldih	aux, -0x4010($31)	# -0x4010 == 0xbff0 as a signed 16-bit imm: aux = sign-extended 0xbff00000, the high word of -1.0
	vinsw	aux, $31, 3, aux	# place the -1.0 high word into word slot 3 of a zero vector — NOTE(review): confirm vinsw operand order/lane width
	ldih    aux, 0x3ff0($31)	# scalar lane: 0x3ff00000, the high word of +1.0
	sll     aux, 32, aux		# = 0x3ff00000_00000000 = IEEE-754 double +1.0
	#00 00 01 01
	vshff	aux, aux, 0x05, flip0011	# shuffle +/-1.0 lanes into pattern (see selector comment above)
	#00 01 00 01
	vshff	aux, aux, 0x11, flip0101
	#00 01 01 00
	vshff	aux, aux, 0x14, flip0110
	vcpysn	flip0011, flip0011, flip0011	# cpysn x,x = negate each lane's sign bit
	vcpysn	flip0101, flip0101, flip0101
	vcpysn	flip0110, flip0110, flip0110
	ldi	aux, 0x1($31)
	sll	aux, 63, aux		# scalar 1<<63: the 64-bit sign-bit mask
	vshff	aux, aux, 0, aux	# broadcast the mask to all lanes; aux stays live as bitonic_vec's mask
.endm

# vselgt a, b, c, d — complement of vsellt with the data operands
# swapped: d = (a < 0) ? c : b, i.e. "select b when a >= 0".
.macro	vselgt a, b, c, d
	vsellt	\a, \c, \b, \d
.endm
# bitonic_vec vec, flip, mask, select — one compare-exchange stage
# *within* a single 4-lane vector.
#   \mask   vshff selector pairing each lane with its exchange partner
#   \flip   sign pattern choosing per-lane exchange direction
#   \select vsellt (ascending lanes) or vselgt (descending lanes)
# Clobbers: vswap (t3), vcmp (t4). Requires aux = sign-bit mask from
# init_flips.
# NOTE(review): ordering comes from a 64-bit integer subtract (vsubl),
# so it assumes the element encoding is order-preserving under
# subtraction (no overflow) — confirm for the intended element type.
.macro	bitonic_vec vec, flip, mask, select
	vshff	\vec, \vec, \mask, vswap	# vswap = partner lanes of \vec
	vsubl	\vec, vswap, vcmp		# sign(vcmp lane) = (own < partner)
	vlog3r0	aux, \flip, vcmp, vcmp		# 3-input logic op: conditionally invert the compare sign per \flip, under the sign-bit mask in aux — NOTE(review): vlog3r0 truth table not verifiable here
	\select	vcmp, \vec, vswap, \vec		# per lane keep own value or take partner
.endm

# Direction-specific in-vector exchange stages. The name suffix is the
# per-lane direction pattern (0 = one direction, 1 = the inverse),
# matching the flip vector used. Selector 0x4e pairs lanes at distance
# 2 (halves swapped: 2,3,0,1); 0xb1 pairs adjacent lanes (1,0,3,2) —
# by analogy with the 2-bit-per-lane selector comments in init_flips.
.macro	bitonic_0011 vec
	bitonic_vec	\vec, flip0011, 0x4e, vsellt
.endm
# Same partners as bitonic_0011, opposite direction in every lane.
.macro	bitonic_1100 vec
	bitonic_vec	\vec, flip0011, 0x4e, vselgt
.endm
# Distance-1 partners, alternating directions per pair.
.macro	bitonic_0101 vec
	bitonic_vec	\vec, flip0101, 0xb1, vsellt
.endm
.macro bitonic_1010 vec
	bitonic_vec	\vec, flip0101, 0xb1, vselgt
.endm
# Distance-1 partners, directions mirrored between the two pairs —
# used only by the first step of bitonic_init_8.
.macro	bitonic_0110 vec
	bitonic_vec	\vec, flip0110, 0xb1, vsellt
.endm
.macro	bitonic_1001 vec
	bitonic_vec	\vec, flip0110, 0xb1, vselgt
.endm
# bitonic_inc v0, v1 — lane-wise compare-exchange across two vectors:
# on exit each lane of \v0 holds the smaller and \v1 the larger of the
# pair (order induced by 64-bit vsubl; see the overflow NOTE at
# bitonic_vec). Clobbers vcmp (t4), vswap (t3).
.macro	bitonic_inc v0, v1
	vsubl	\v0, \v1, vcmp
	vcpys	vcmp, flip0011, vcmp		# sign from vcmp, magnitude from flip0011: normalizes each lane to +/-1.0, keeping only the compare sign
	vsellt	vcmp, \v1, \v0, vswap		# vswap = lane-wise max
	vsellt	vcmp, \v0, \v1, \v0		# \v0   = lane-wise min
	vcpys	vswap, vswap, \v1		# cpys x,x = register move: \v1 = max
.endm
# bitonic_dec v0, v1 — mirror of bitonic_inc: \v0 gets the lane-wise
# max, \v1 the lane-wise min.
.macro	bitonic_dec v0, v1
	vsubl	\v0, \v1, vcmp
	vcpys	vcmp, flip0011, vcmp
	vsellt	vcmp, \v0, \v1, vswap		# vswap = lane-wise min
	vsellt	vcmp, \v1, \v0, \v0		# \v0   = lane-wise max
	vcpys	vswap, vswap, \v1		# \v1   = min
.endm
# bitonic_init_8 v0, v1 — first network stages over 8 unsorted elements
# (2 vectors): leaves {v0,v1} as an 8-element bitonic sequence, ready
# for bitonic_inc_8 / bitonic_dec_8.
.macro	bitonic_init_8 v0, v1
	bitonic_0110	\v0
	bitonic_0110	\v1
	bitonic_0011	\v0
	bitonic_1100	\v1
	bitonic_0101	\v0
	bitonic_1010	\v1
.endm
# bitonic_inc_8 v0, v1 — bitonic merge of an 8-element bitonic sequence
# into ascending order (v0 lanes first, then v1 lanes).
.macro	bitonic_inc_8 v0, v1
	bitonic_inc	\v0, \v1
	bitonic_0011	\v0
	bitonic_0011	\v1
	bitonic_0101	\v0
	bitonic_0101	\v1
.endm
# bitonic_dec_8 v0, v1 — as bitonic_inc_8 but descending.
.macro	bitonic_dec_8 v0, v1
	bitonic_dec	\v0, \v1
	bitonic_1100	\v0
	bitonic_1100	\v1
	bitonic_1010	\v0
	bitonic_1010	\v1
.endm
# bitonic_init_16 v0..v3 — sort the first 8 ascending and the second 8
# descending, producing a 16-element bitonic sequence.
.macro	bitonic_init_16 v0, v1, v2, v3
	bitonic_init_8	\v0, \v1
	bitonic_inc_8	\v0, \v1
	bitonic_init_8	\v2, \v3
	bitonic_dec_8	\v2, \v3
.endm
# bitonic_inc_16 v0..v3 — merge a 16-element bitonic sequence into
# ascending order: cross-half compare-exchanges, then merge each half.
.macro	bitonic_inc_16 v0, v1, v2, v3
	bitonic_inc	\v0, \v2
	bitonic_inc	\v1, \v3
	bitonic_inc_8	\v0, \v1
	bitonic_inc_8	\v2, \v3
.endm
# bitonic_dec_16 v0..v3 — as bitonic_inc_16 but descending.
.macro	bitonic_dec_16 v0, v1, v2, v3
	bitonic_dec	\v0, \v2
	bitonic_dec	\v1, \v3
	bitonic_dec_8	\v0, \v1
	bitonic_dec_8	\v2, \v3
.endm
# bitonic_init_32 v0..v7 — sort the first 16 ascending and the second
# 16 descending, producing a 32-element bitonic sequence.
.macro	bitonic_init_32 v0, v1, v2, v3, v4, v5, v6, v7
	bitonic_init_16	\v0, \v1, \v2, \v3
	bitonic_inc_16	\v0, \v1, \v2, \v3
	bitonic_init_16	\v4, \v5, \v6, \v7
	bitonic_dec_16	\v4, \v5, \v6, \v7
.endm
# bitonic_inc_32 v0..v7 — merge a 32-element bitonic sequence into
# ascending order.
.macro	bitonic_inc_32 v0, v1, v2, v3, v4, v5, v6, v7
	bitonic_inc	\v0, \v4
	bitonic_inc	\v1, \v5
	bitonic_inc	\v2, \v6
	bitonic_inc	\v3, \v7
	bitonic_inc_16	\v0, \v1, \v2, \v3
	bitonic_inc_16	\v4, \v5, \v6, \v7
.endm
# bitonic_dec_32 v0..v7 — as bitonic_inc_32 but descending.
.macro	bitonic_dec_32 v0, v1, v2, v3, v4, v5, v6, v7
	bitonic_dec	\v0, \v4
	bitonic_dec	\v1, \v5
	bitonic_dec	\v2, \v6
	bitonic_dec	\v3, \v7
	bitonic_dec_16	\v0, \v1, \v2, \v3
	bitonic_dec_16	\v4, \v5, \v6, \v7
.endm
# bitonic_init_64 v0..vf — sort the first 32 ascending and the second
# 32 descending, producing a 64-element bitonic sequence over all 16
# vectors.
.macro	bitonic_init_64 v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, va, vb, vc, vd, ve, vf
	bitonic_init_32	\v0, \v1, \v2, \v3, \v4, \v5, \v6, \v7
	bitonic_inc_32	\v0, \v1, \v2, \v3, \v4, \v5, \v6, \v7
	bitonic_init_32	\v8, \v9, \va, \vb, \vc, \vd, \ve, \vf
	bitonic_dec_32	\v8, \v9, \va, \vb, \vc, \vd, \ve, \vf
.endm
# bitonic_inc_64 v0..vf — merge a 64-element bitonic sequence into
# ascending order.
.macro	bitonic_inc_64 v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, va, vb, vc, vd, ve, vf
	bitonic_inc	\v0, \v8 
	bitonic_inc	\v1, \v9
	bitonic_inc	\v2, \va
	bitonic_inc	\v3, \vb
	bitonic_inc	\v4, \vc
	bitonic_inc	\v5, \vd
	bitonic_inc	\v6, \ve
	bitonic_inc	\v7, \vf
	bitonic_inc_32	\v0, \v1, \v2, \v3, \v4, \v5, \v6, \v7
	bitonic_inc_32	\v8, \v9, \va, \vb, \vc, \vd, \ve, \vf
.endm
# bitonic_dec_64 v0..vf — as bitonic_inc_64 but descending.
.macro	bitonic_dec_64 v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, va, vb, vc, vd, ve, vf
	bitonic_dec	\v0, \v8 
	bitonic_dec	\v1, \v9
	bitonic_dec	\v2, \va
	bitonic_dec	\v3, \vb
	bitonic_dec	\v4, \vc
	bitonic_dec	\v5, \vd
	bitonic_dec	\v6, \ve
	bitonic_dec	\v7, \vf
	bitonic_dec_32	\v0, \v1, \v2, \v3, \v4, \v5, \v6, \v7
	bitonic_dec_32	\v8, \v9, \va, \vb, \vc, \vd, \ve, \vf
.endm

# lda0_64 — load all 64 elements (16 vectors x 32 B = 512 B) from the
# buffer at a0 into av0..avf. NOTE(review): vldd presumably requires
# 32-byte alignment of a0 — confirm.
.macro	lda0_64
	vldd	av0, 0x000(a0)
	vldd	av1, 0x020(a0)
	vldd	av2, 0x040(a0)
	vldd	av3, 0x060(a0)
	vldd	av4, 0x080(a0)
	vldd	av5, 0x0a0(a0)
	vldd	av6, 0x0c0(a0)
	vldd	av7, 0x0e0(a0)
	vldd	av8, 0x100(a0)
	vldd	av9, 0x120(a0)
	vldd	ava, 0x140(a0)
	vldd	avb, 0x160(a0)
	vldd	avc, 0x180(a0)
	vldd	avd, 0x1a0(a0)
	vldd	ave, 0x1c0(a0)
	vldd	avf, 0x1e0(a0)
.endm
# sta0_64 — store av0..avf back to the buffer at a0 (inverse of
# lda0_64, same layout and alignment assumption).
.macro	sta0_64
	vstd	av0, 0x000(a0)
	vstd	av1, 0x020(a0)
	vstd	av2, 0x040(a0)
	vstd	av3, 0x060(a0)
	vstd	av4, 0x080(a0)
	vstd	av5, 0x0a0(a0)
	vstd	av6, 0x0c0(a0)
	vstd	av7, 0x0e0(a0)
	vstd	av8, 0x100(a0)
	vstd	av9, 0x120(a0)
	vstd	ava, 0x140(a0)
	vstd	avb, 0x160(a0)
	vstd	avc, 0x180(a0)
	vstd	avd, 0x1a0(a0)
	vstd	ave, 0x1c0(a0)
	vstd	avf, 0x1e0(a0)
.endm
	.section .text1
	.align 4
	
	.globl slave_bitonic_sort_64
	.type slave_bitonic_sort_64 STT_FUNC
	.globl bitonic_sort_64_inc
	.ent bitonic_sort_64_inc
#-----------------------------------------------------------------------
# slave_bitonic_sort_64 / bitonic_sort_64_inc
# Sorts the 64 contiguous 64-bit elements at a0 (512 B) in ascending
# order, in place, with a full bitonic network held entirely in
# registers.
# In:      a0 = element buffer (alignment: see lda0_64 NOTE)
# Clobbers: v0, t0-t12, a1-a5; s0-s2 saved/restored below
# NOTE(review): assumes sp keeps the alignment vstd needs for the
# 32 B saves at offsets 0/32/64 — confirm against the platform ABI.
#-----------------------------------------------------------------------
slave_bitonic_sort_64:
bitonic_sort_64_inc:
	ldi	sp, -96(sp)		# frame for three 32 B callee-saved vector regs
	vstd	s0,  0(sp)		# s0-s2 back avd/ave/avf (see #defines)
	vstd	s1, 32(sp)
	vstd	s2, 64(sp)
	init_flips			# build flip patterns + sign mask
	lda0_64				# load all 16 data vectors
	bitonic_init_64	av0, av1, av2, av3, av4, av5, av6, av7, av8, av9, ava, avb, avc, avd, ave, avf
	bitonic_inc_64	av0, av1, av2, av3, av4, av5, av6, av7, av8, av9, ava, avb, avc, avd, ave, avf
	sta0_64				# write back sorted data
	vldd	s0,  0(sp)
	vldd	s1, 32(sp)
	vldd	s2, 64(sp)
	ldi	sp, 96(sp)
	ret
        .end bitonic_sort_64_inc

	.ent bitonic_64_inc
	.globl bitonic_64_inc
#-----------------------------------------------------------------------
# bitonic_64_inc — merge-only entry point: runs only the final merge
# over the 64 elements at a0, producing ascending order in place.
# Precondition: the 64 elements already form a bitonic sequence (e.g.
# first 32 ascending, last 32 descending) — presumably for merging two
# pre-sorted halves; confirm with callers.
# Clobbers as bitonic_sort_64_inc; s0-s2 saved/restored.
#-----------------------------------------------------------------------
bitonic_64_inc:
	ldi	sp, -96(sp)
	vstd	s0,  0(sp)
	vstd	s1, 32(sp)
	vstd	s2, 64(sp)
	init_flips
	lda0_64
	bitonic_inc_64	av0, av1, av2, av3, av4, av5, av6, av7, av8, av9, ava, avb, avc, avd, ave, avf
	sta0_64
	vldd	s0,  0(sp)
	vldd	s1, 32(sp)
	vldd	s2, 64(sp)
	ldi	sp, 96(sp)
	ret
        .end bitonic_64_inc

	.globl bitonic_sort_64_dec
	.ent bitonic_sort_64_dec
#-----------------------------------------------------------------------
# bitonic_sort_64_dec — descending counterpart of bitonic_sort_64_inc:
# sorts the 64 elements at a0 in descending order, in place.
# Same clobbers / stack-alignment note as bitonic_sort_64_inc.
#-----------------------------------------------------------------------
bitonic_sort_64_dec:
	ldi	sp, -96(sp)
	vstd	s0,  0(sp)
	vstd	s1, 32(sp)
	vstd	s2, 64(sp)
	init_flips
	lda0_64
	bitonic_init_64	av0, av1, av2, av3, av4, av5, av6, av7, av8, av9, ava, avb, avc, avd, ave, avf
	bitonic_dec_64	av0, av1, av2, av3, av4, av5, av6, av7, av8, av9, ava, avb, avc, avd, ave, avf
	sta0_64
	vldd	s0,  0(sp)
	vldd	s1, 32(sp)
	vldd	s2, 64(sp)
	ldi	sp, 96(sp)
	ret
        .end bitonic_sort_64_dec

	.ent bitonic_64_dec
	.globl bitonic_64_dec
#-----------------------------------------------------------------------
# bitonic_64_dec — merge-only descending entry point: merges the
# 64 elements at a0 (which must already form a bitonic sequence — see
# bitonic_64_inc) into descending order, in place.
#-----------------------------------------------------------------------
bitonic_64_dec:
	ldi	sp, -96(sp)
	vstd	s0,  0(sp)
	vstd	s1, 32(sp)
	vstd	s2, 64(sp)
	init_flips
	lda0_64
	bitonic_dec_64	av0, av1, av2, av3, av4, av5, av6, av7, av8, av9, ava, avb, avc, avd, ave, avf
	sta0_64
	vldd	s0,  0(sp)
	vldd	s1, 32(sp)
	vldd	s2, 64(sp)
	ldi	sp, 96(sp)
	ret
        .end bitonic_64_dec

	.ent bitonic_sort_8
	.globl bitonic_sort_8
#-----------------------------------------------------------------------
# bitonic_sort_8 — sorts the 8 elements (64 B) at a0 ascending, in
# place. Uses only av0/av1 (t5/t6), so no callee-saved registers are
# touched and no stack frame is needed. Note: loads happen before
# init_flips here (the other entry points do the reverse); harmless,
# since the two register sets are disjoint.
#-----------------------------------------------------------------------
bitonic_sort_8:
	vldd	av0, 0x000(a0)
	vldd	av1, 0x020(a0)
	init_flips
	bitonic_init_8	av0, av1
	bitonic_inc_8	av0, av1
	vstd	av0, 0x000(a0)
	vstd	av1, 0x020(a0)
	ret
	.end bitonic_sort_8

	.ent bitonic_sort_32
	.globl bitonic_sort_32
#-----------------------------------------------------------------------
# bitonic_sort_32 — sorts the 32 elements (256 B) at a0 ascending, in
# place. Uses av0..av7 (t5-t12) only; no callee-saved registers, so no
# stack frame.
#-----------------------------------------------------------------------
bitonic_sort_32:
	init_flips
	vldd	av0, 0x000(a0)
	vldd	av1, 0x020(a0)
	vldd	av2, 0x040(a0)
	vldd	av3, 0x060(a0)
	vldd	av4, 0x080(a0)
	vldd	av5, 0x0a0(a0)
	vldd	av6, 0x0c0(a0)
	vldd	av7, 0x0e0(a0)
	bitonic_init_32	av0, av1, av2, av3, av4, av5, av6, av7
	bitonic_inc_32	av0, av1, av2, av3, av4, av5, av6, av7
	vstd	av0, 0x000(a0)
	vstd	av1, 0x020(a0)
	vstd	av2, 0x040(a0)
	vstd	av3, 0x060(a0)
	vstd	av4, 0x080(a0)
	vstd	av5, 0x0a0(a0)
	vstd	av6, 0x0c0(a0)
	vstd	av7, 0x0e0(a0)
	ret
        .end bitonic_sort_32

	.ent bitonic_sort_16
	.globl bitonic_sort_16
#-----------------------------------------------------------------------
# bitonic_sort_16 — sorts the 16 elements (128 B) at a0 ascending, in
# place. Uses av0..av3 (t5-t8) only; no callee-saved registers, so no
# stack frame.
#-----------------------------------------------------------------------
bitonic_sort_16:
	init_flips
	vldd	av0, 0x000(a0)
	vldd	av1, 0x020(a0)
	vldd	av2, 0x040(a0)
	vldd	av3, 0x060(a0)
	bitonic_init_16	av0, av1, av2, av3
	bitonic_inc_16	av0, av1, av2, av3
	vstd	av0, 0x000(a0)
	vstd	av1, 0x020(a0)
	vstd	av2, 0x040(a0)
	vstd	av3, 0x060(a0)
	ret
        .end bitonic_sort_16