; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=amdgcn-amd-amdhsa -mcpu=gfx1010 --mattr=+enable-flat-scratch < %s | FileCheck -check-prefixes=GCN,FLAT_SCR_OPT %s
; RUN: llc -mtriple=amdgcn-amd-amdhsa -mcpu=gfx1010 --mattr=+architected-flat-scratch < %s | FileCheck -check-prefixes=GCN,FLAT_SCR_ARCH %s

declare void @extern_func() #0

; A kernel with no calls that addrspacecasts a private (scratch) stack object
; to a flat pointer and stores through it. The FLAT_SCR_OPT run emits the
; software flat-scratch init (s_setreg of HW_REG_FLAT_SCR_LO/HI from the
; incoming SGPRs) before the flat_store; the FLAT_SCR_ARCH run omits that
; setup sequence entirely and goes straight to src_private_base.
define amdgpu_kernel void @stack_object_addrspacecast_in_kernel_no_calls() {
; FLAT_SCR_OPT-LABEL: stack_object_addrspacecast_in_kernel_no_calls:
; FLAT_SCR_OPT:       ; %bb.0:
; FLAT_SCR_OPT-NEXT:    s_add_u32 s0, s0, s3
; FLAT_SCR_OPT-NEXT:    s_addc_u32 s1, s1, 0
; FLAT_SCR_OPT-NEXT:    s_setreg_b32 hwreg(HW_REG_FLAT_SCR_LO), s0
; FLAT_SCR_OPT-NEXT:    s_setreg_b32 hwreg(HW_REG_FLAT_SCR_HI), s1
; FLAT_SCR_OPT-NEXT:    s_mov_b64 s[0:1], src_private_base
; FLAT_SCR_OPT-NEXT:    v_mov_b32_e32 v0, 4
; FLAT_SCR_OPT-NEXT:    v_mov_b32_e32 v1, s1
; FLAT_SCR_OPT-NEXT:    v_mov_b32_e32 v2, 0
; FLAT_SCR_OPT-NEXT:    flat_store_dword v[0:1], v2
; FLAT_SCR_OPT-NEXT:    s_waitcnt_vscnt null, 0x0
; FLAT_SCR_OPT-NEXT:    s_endpgm
;
; FLAT_SCR_ARCH-LABEL: stack_object_addrspacecast_in_kernel_no_calls:
; FLAT_SCR_ARCH:       ; %bb.0:
; FLAT_SCR_ARCH-NEXT:    s_mov_b64 s[0:1], src_private_base
; FLAT_SCR_ARCH-NEXT:    v_mov_b32_e32 v0, 4
; FLAT_SCR_ARCH-NEXT:    v_mov_b32_e32 v1, s1
; FLAT_SCR_ARCH-NEXT:    v_mov_b32_e32 v2, 0
; FLAT_SCR_ARCH-NEXT:    flat_store_dword v[0:1], v2
; FLAT_SCR_ARCH-NEXT:    s_waitcnt_vscnt null, 0x0
; FLAT_SCR_ARCH-NEXT:    s_endpgm
  %alloca = alloca i32, addrspace(5)
  %cast = addrspacecast ptr addrspace(5) %alloca to ptr
  ; volatile keeps the store (and hence the stack object) from being optimized away
  store volatile i32 0, ptr %cast
  ret void
}

; A kernel with no calls that stores directly to a private stack object (no
; addrspacecast), so the store is a scratch_store_dword. Only the
; FLAT_SCR_OPT run needs the s_setreg flat-scratch init before the store;
; the FLAT_SCR_ARCH run starts at the store sequence immediately.
define amdgpu_kernel void @stack_object_in_kernel_no_calls() {
; FLAT_SCR_OPT-LABEL: stack_object_in_kernel_no_calls:
; FLAT_SCR_OPT:       ; %bb.0:
; FLAT_SCR_OPT-NEXT:    s_add_u32 s0, s0, s3
; FLAT_SCR_OPT-NEXT:    s_addc_u32 s1, s1, 0
; FLAT_SCR_OPT-NEXT:    s_setreg_b32 hwreg(HW_REG_FLAT_SCR_LO), s0
; FLAT_SCR_OPT-NEXT:    s_setreg_b32 hwreg(HW_REG_FLAT_SCR_HI), s1
; FLAT_SCR_OPT-NEXT:    v_mov_b32_e32 v0, 0
; FLAT_SCR_OPT-NEXT:    s_mov_b32 vcc_lo, 0
; FLAT_SCR_OPT-NEXT:    scratch_store_dword off, v0, vcc_lo offset:4
; FLAT_SCR_OPT-NEXT:    s_waitcnt_vscnt null, 0x0
; FLAT_SCR_OPT-NEXT:    s_endpgm
;
; FLAT_SCR_ARCH-LABEL: stack_object_in_kernel_no_calls:
; FLAT_SCR_ARCH:       ; %bb.0:
; FLAT_SCR_ARCH-NEXT:    v_mov_b32_e32 v0, 0
; FLAT_SCR_ARCH-NEXT:    s_mov_b32 vcc_lo, 0
; FLAT_SCR_ARCH-NEXT:    scratch_store_dword off, v0, vcc_lo offset:4
; FLAT_SCR_ARCH-NEXT:    s_waitcnt_vscnt null, 0x0
; FLAT_SCR_ARCH-NEXT:    s_endpgm
  %alloca = alloca i32, addrspace(5)
  ; volatile keeps the store (and hence the stack object) from being optimized away
  store volatile i32 0, ptr addrspace(5) %alloca
  ret void
}

; A kernel that makes a call but has no stack objects of its own. Both runs
; set up the calling convention (s32 = 0, workitem-id packing via v_or3_b32
; into v31, GOT-relative load of extern_func's address, s_swappc_b64); the
; FLAT_SCR_OPT run additionally emits the s_setreg flat-scratch init from
; s[8:9] before the call setup, while FLAT_SCR_ARCH does not.
define amdgpu_kernel void @kernel_calls_no_stack() {
; FLAT_SCR_OPT-LABEL: kernel_calls_no_stack:
; FLAT_SCR_OPT:       ; %bb.0:
; FLAT_SCR_OPT-NEXT:    s_add_u32 s8, s8, s13
; FLAT_SCR_OPT-NEXT:    s_mov_b32 s32, 0
; FLAT_SCR_OPT-NEXT:    s_addc_u32 s9, s9, 0
; FLAT_SCR_OPT-NEXT:    s_setreg_b32 hwreg(HW_REG_FLAT_SCR_LO), s8
; FLAT_SCR_OPT-NEXT:    s_setreg_b32 hwreg(HW_REG_FLAT_SCR_HI), s9
; FLAT_SCR_OPT-NEXT:    s_mov_b64 s[8:9], s[4:5]
; FLAT_SCR_OPT-NEXT:    s_getpc_b64 s[4:5]
; FLAT_SCR_OPT-NEXT:    s_add_u32 s4, s4, extern_func@gotpcrel32@lo+4
; FLAT_SCR_OPT-NEXT:    s_addc_u32 s5, s5, extern_func@gotpcrel32@hi+12
; FLAT_SCR_OPT-NEXT:    v_lshlrev_b32_e32 v2, 20, v2
; FLAT_SCR_OPT-NEXT:    s_load_dwordx2 s[16:17], s[4:5], 0x0
; FLAT_SCR_OPT-NEXT:    v_lshlrev_b32_e32 v1, 10, v1
; FLAT_SCR_OPT-NEXT:    s_mov_b32 s14, s12
; FLAT_SCR_OPT-NEXT:    s_mov_b32 s13, s11
; FLAT_SCR_OPT-NEXT:    s_mov_b32 s12, s10
; FLAT_SCR_OPT-NEXT:    s_mov_b64 s[10:11], s[6:7]
; FLAT_SCR_OPT-NEXT:    v_or3_b32 v31, v0, v1, v2
; FLAT_SCR_OPT-NEXT:    s_mov_b64 s[4:5], s[0:1]
; FLAT_SCR_OPT-NEXT:    s_mov_b64 s[6:7], s[2:3]
; FLAT_SCR_OPT-NEXT:    s_waitcnt lgkmcnt(0)
; FLAT_SCR_OPT-NEXT:    s_swappc_b64 s[30:31], s[16:17]
; FLAT_SCR_OPT-NEXT:    s_endpgm
;
; FLAT_SCR_ARCH-LABEL: kernel_calls_no_stack:
; FLAT_SCR_ARCH:       ; %bb.0:
; FLAT_SCR_ARCH-NEXT:    s_mov_b32 s13, s9
; FLAT_SCR_ARCH-NEXT:    s_mov_b32 s12, s8
; FLAT_SCR_ARCH-NEXT:    s_mov_b64 s[8:9], s[4:5]
; FLAT_SCR_ARCH-NEXT:    s_getpc_b64 s[4:5]
; FLAT_SCR_ARCH-NEXT:    s_add_u32 s4, s4, extern_func@gotpcrel32@lo+4
; FLAT_SCR_ARCH-NEXT:    s_addc_u32 s5, s5, extern_func@gotpcrel32@hi+12
; FLAT_SCR_ARCH-NEXT:    v_lshlrev_b32_e32 v2, 20, v2
; FLAT_SCR_ARCH-NEXT:    s_load_dwordx2 s[16:17], s[4:5], 0x0
; FLAT_SCR_ARCH-NEXT:    v_lshlrev_b32_e32 v1, 10, v1
; FLAT_SCR_ARCH-NEXT:    s_mov_b32 s14, s10
; FLAT_SCR_ARCH-NEXT:    s_mov_b64 s[10:11], s[6:7]
; FLAT_SCR_ARCH-NEXT:    s_mov_b64 s[4:5], s[0:1]
; FLAT_SCR_ARCH-NEXT:    s_mov_b64 s[6:7], s[2:3]
; FLAT_SCR_ARCH-NEXT:    v_or3_b32 v31, v0, v1, v2
; FLAT_SCR_ARCH-NEXT:    s_mov_b32 s32, 0
; FLAT_SCR_ARCH-NEXT:    s_waitcnt lgkmcnt(0)
; FLAT_SCR_ARCH-NEXT:    s_swappc_b64 s[30:31], s[16:17]
; FLAT_SCR_ARCH-NEXT:    s_endpgm
  call void @extern_func()
  ret void
}

; Spill stress test: empty inline asm blocks clobber s[0:105] and all of
; v0-v255, so the loaded kernarg pointer s[2:3] must survive across them.
; Both runs spill it lane-wise into a VGPR (v_writelane) and then spill that
; VGPR itself to scratch ("4-byte Folded Spill") under a temporarily narrowed
; exec mask, restoring via v_readlane before the final global_store. The
; FLAT_SCR_OPT run additionally begins with the s_setreg flat-scratch init.
define amdgpu_kernel void @test(ptr addrspace(1) %out, i32 %in) {
; FLAT_SCR_OPT-LABEL: test:
; FLAT_SCR_OPT:       ; %bb.0:
; FLAT_SCR_OPT-NEXT:    s_add_u32 s2, s2, s5
; FLAT_SCR_OPT-NEXT:    s_addc_u32 s3, s3, 0
; FLAT_SCR_OPT-NEXT:    s_setreg_b32 hwreg(HW_REG_FLAT_SCR_LO), s2
; FLAT_SCR_OPT-NEXT:    s_setreg_b32 hwreg(HW_REG_FLAT_SCR_HI), s3
; FLAT_SCR_OPT-NEXT:    s_load_dwordx2 s[2:3], s[0:1], 0x0
; FLAT_SCR_OPT-NEXT:    s_mov_b32 s104, exec_lo
; FLAT_SCR_OPT-NEXT:    s_mov_b32 exec_lo, 3
; FLAT_SCR_OPT-NEXT:    s_mov_b32 s4, 0
; FLAT_SCR_OPT-NEXT:    scratch_store_dword off, v72, s4
; FLAT_SCR_OPT-NEXT:    s_waitcnt lgkmcnt(0)
; FLAT_SCR_OPT-NEXT:    v_writelane_b32 v72, s2, 0
; FLAT_SCR_OPT-NEXT:    s_mov_b32 s4, 4
; FLAT_SCR_OPT-NEXT:    v_writelane_b32 v72, s3, 1
; FLAT_SCR_OPT-NEXT:    scratch_store_dword off, v72, s4 ; 4-byte Folded Spill
; FLAT_SCR_OPT-NEXT:    s_waitcnt_depctr 0xffe3
; FLAT_SCR_OPT-NEXT:    s_mov_b32 s4, 0
; FLAT_SCR_OPT-NEXT:    scratch_load_dword v72, off, s4
; FLAT_SCR_OPT-NEXT:    s_waitcnt vmcnt(0)
; FLAT_SCR_OPT-NEXT:    s_waitcnt_depctr 0xffe3
; FLAT_SCR_OPT-NEXT:    s_mov_b32 exec_lo, s104
; FLAT_SCR_OPT-NEXT:    s_load_dword vcc_lo, s[0:1], 0x8
; FLAT_SCR_OPT-NEXT:    ; kill: killed $sgpr0_sgpr1
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    s_waitcnt lgkmcnt(0)
; FLAT_SCR_OPT-NEXT:    v_mov_b32_e32 v0, vcc_lo
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    ;;#ASMSTART
; FLAT_SCR_OPT-NEXT:    ;;#ASMEND
; FLAT_SCR_OPT-NEXT:    s_mov_b32 s2, exec_lo
; FLAT_SCR_OPT-NEXT:    s_mov_b32 exec_lo, 3
; FLAT_SCR_OPT-NEXT:    s_mov_b32 s3, 0
; FLAT_SCR_OPT-NEXT:    scratch_store_dword off, v2, s3
; FLAT_SCR_OPT-NEXT:    s_waitcnt_depctr 0xffe3
; FLAT_SCR_OPT-NEXT:    s_mov_b32 s3, 4
; FLAT_SCR_OPT-NEXT:    scratch_load_dword v2, off, s3 ; 4-byte Folded Reload
; FLAT_SCR_OPT-NEXT:    s_waitcnt_depctr 0xffe3
; FLAT_SCR_OPT-NEXT:    s_mov_b32 s3, 0
; FLAT_SCR_OPT-NEXT:    s_waitcnt vmcnt(0)
; FLAT_SCR_OPT-NEXT:    v_readlane_b32 s0, v2, 0
; FLAT_SCR_OPT-NEXT:    v_readlane_b32 s1, v2, 1
; FLAT_SCR_OPT-NEXT:    scratch_load_dword v2, off, s3
; FLAT_SCR_OPT-NEXT:    s_waitcnt vmcnt(0)
; FLAT_SCR_OPT-NEXT:    s_waitcnt_depctr 0xffe3
; FLAT_SCR_OPT-NEXT:    s_mov_b32 exec_lo, s2
; FLAT_SCR_OPT-NEXT:    v_mov_b32_e32 v1, 0
; FLAT_SCR_OPT-NEXT:    global_store_dword v1, v0, s[0:1]
; FLAT_SCR_OPT-NEXT:    s_endpgm
;
; FLAT_SCR_ARCH-LABEL: test:
; FLAT_SCR_ARCH:       ; %bb.0:
; FLAT_SCR_ARCH-NEXT:    s_load_dwordx2 s[2:3], s[0:1], 0x0
; FLAT_SCR_ARCH-NEXT:    s_mov_b32 s104, exec_lo
; FLAT_SCR_ARCH-NEXT:    s_mov_b32 exec_lo, 3
; FLAT_SCR_ARCH-NEXT:    s_mov_b32 s4, 0
; FLAT_SCR_ARCH-NEXT:    scratch_store_dword off, v72, s4
; FLAT_SCR_ARCH-NEXT:    s_waitcnt lgkmcnt(0)
; FLAT_SCR_ARCH-NEXT:    v_writelane_b32 v72, s2, 0
; FLAT_SCR_ARCH-NEXT:    s_mov_b32 s4, 4
; FLAT_SCR_ARCH-NEXT:    v_writelane_b32 v72, s3, 1
; FLAT_SCR_ARCH-NEXT:    scratch_store_dword off, v72, s4 ; 4-byte Folded Spill
; FLAT_SCR_ARCH-NEXT:    s_waitcnt_depctr 0xffe3
; FLAT_SCR_ARCH-NEXT:    s_mov_b32 s4, 0
; FLAT_SCR_ARCH-NEXT:    scratch_load_dword v72, off, s4
; FLAT_SCR_ARCH-NEXT:    s_waitcnt vmcnt(0)
; FLAT_SCR_ARCH-NEXT:    s_waitcnt_depctr 0xffe3
; FLAT_SCR_ARCH-NEXT:    s_mov_b32 exec_lo, s104
; FLAT_SCR_ARCH-NEXT:    s_load_dword vcc_lo, s[0:1], 0x8
; FLAT_SCR_ARCH-NEXT:    ; kill: killed $sgpr0_sgpr1
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    s_waitcnt lgkmcnt(0)
; FLAT_SCR_ARCH-NEXT:    v_mov_b32_e32 v0, vcc_lo
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    ;;#ASMSTART
; FLAT_SCR_ARCH-NEXT:    ;;#ASMEND
; FLAT_SCR_ARCH-NEXT:    s_mov_b32 s2, exec_lo
; FLAT_SCR_ARCH-NEXT:    s_mov_b32 exec_lo, 3
; FLAT_SCR_ARCH-NEXT:    s_mov_b32 s3, 0
; FLAT_SCR_ARCH-NEXT:    scratch_store_dword off, v2, s3
; FLAT_SCR_ARCH-NEXT:    s_waitcnt_depctr 0xffe3
; FLAT_SCR_ARCH-NEXT:    s_mov_b32 s3, 4
; FLAT_SCR_ARCH-NEXT:    scratch_load_dword v2, off, s3 ; 4-byte Folded Reload
; FLAT_SCR_ARCH-NEXT:    s_waitcnt_depctr 0xffe3
; FLAT_SCR_ARCH-NEXT:    s_mov_b32 s3, 0
; FLAT_SCR_ARCH-NEXT:    s_waitcnt vmcnt(0)
; FLAT_SCR_ARCH-NEXT:    v_readlane_b32 s0, v2, 0
; FLAT_SCR_ARCH-NEXT:    v_readlane_b32 s1, v2, 1
; FLAT_SCR_ARCH-NEXT:    scratch_load_dword v2, off, s3
; FLAT_SCR_ARCH-NEXT:    s_waitcnt vmcnt(0)
; FLAT_SCR_ARCH-NEXT:    s_waitcnt_depctr 0xffe3
; FLAT_SCR_ARCH-NEXT:    s_mov_b32 exec_lo, s2
; FLAT_SCR_ARCH-NEXT:    v_mov_b32_e32 v1, 0
; FLAT_SCR_ARCH-NEXT:    global_store_dword v1, v0, s[0:1]
; FLAT_SCR_ARCH-NEXT:    s_endpgm
  ; Clobber s0-s105 in octets (plus the s[104:105] pair) and v0-v255 in
  ; octets so no register is free to hold %out across the asm blocks.
  call void asm sideeffect "", "~{s[0:7]}" ()
  call void asm sideeffect "", "~{s[8:15]}" ()
  call void asm sideeffect "", "~{s[16:23]}" ()
  call void asm sideeffect "", "~{s[24:31]}" ()
  call void asm sideeffect "", "~{s[32:39]}" ()
  call void asm sideeffect "", "~{s[40:47]}" ()
  call void asm sideeffect "", "~{s[48:55]}" ()
  call void asm sideeffect "", "~{s[56:63]}" ()
  call void asm sideeffect "", "~{s[64:71]}" ()
  call void asm sideeffect "", "~{s[72:79]}" ()
  call void asm sideeffect "", "~{s[80:87]}" ()
  call void asm sideeffect "", "~{s[88:95]}" ()
  call void asm sideeffect "", "~{s[96:103]}" ()
  call void asm sideeffect "", "~{s[104:105]}" ()
  call void asm sideeffect "", "~{v[0:7]}" ()
  call void asm sideeffect "", "~{v[8:15]}" ()
  call void asm sideeffect "", "~{v[16:23]}" ()
  call void asm sideeffect "", "~{v[24:31]}" ()
  call void asm sideeffect "", "~{v[32:39]}" ()
  call void asm sideeffect "", "~{v[40:47]}" ()
  call void asm sideeffect "", "~{v[48:55]}" ()
  call void asm sideeffect "", "~{v[56:63]}" ()
  call void asm sideeffect "", "~{v[64:71]}" ()
  call void asm sideeffect "", "~{v[72:79]}" ()
  call void asm sideeffect "", "~{v[80:87]}" ()
  call void asm sideeffect "", "~{v[88:95]}" ()
  call void asm sideeffect "", "~{v[96:103]}" ()
  call void asm sideeffect "", "~{v[104:111]}" ()
  call void asm sideeffect "", "~{v[112:119]}" ()
  call void asm sideeffect "", "~{v[120:127]}" ()
  call void asm sideeffect "", "~{v[128:135]}" ()
  call void asm sideeffect "", "~{v[136:143]}" ()
  call void asm sideeffect "", "~{v[144:151]}" ()
  call void asm sideeffect "", "~{v[152:159]}" ()
  call void asm sideeffect "", "~{v[160:167]}" ()
  call void asm sideeffect "", "~{v[168:175]}" ()
  call void asm sideeffect "", "~{v[176:183]}" ()
  call void asm sideeffect "", "~{v[184:191]}" ()
  call void asm sideeffect "", "~{v[192:199]}" ()
  call void asm sideeffect "", "~{v[200:207]}" ()
  call void asm sideeffect "", "~{v[208:215]}" ()
  call void asm sideeffect "", "~{v[216:223]}" ()
  call void asm sideeffect "", "~{v[224:231]}" ()
  call void asm sideeffect "", "~{v[232:239]}" ()
  call void asm sideeffect "", "~{v[240:247]}" ()
  call void asm sideeffect "", "~{v[248:255]}" ()

  ; %out must survive all the clobbers above to reach this store.
  store i32 %in, ptr addrspace(1) %out
  ret void
}

; A trivial kernel with no calls and no stack: neither run mode emits any
; flat-scratch setup, so both share the common GCN check prefix.
define amdgpu_kernel void @kernel_no_calls_no_stack() {
; GCN-LABEL: kernel_no_calls_no_stack:
; GCN:       ; %bb.0:
; GCN-NEXT:    s_endpgm
  ret void
}

attributes #0 = { nounwind }
