# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple powerpc64le-unknown-linux-gnu -o - %s -verify-machineinstrs \
# RUN:   -run-pass=machine-sink | FileCheck %s

--- |
  ; ModuleID = 'sink-down-more-instructions-regpressure-high.ll'
  source_filename = "sink-down-more-instructions-regpressure-high.c"
  target datalayout = "e-m:e-i64:64-n32:64"
  target triple = "powerpc64le-unknown-linux-gnu"

  ; This file checks that %16:gprc in MIR cannot be sunk down because of high
  ; register pressure in the destination block.

  ; Function Attrs: nofree norecurse nounwind
  define dso_local signext i32 @foo(i32 signext %0, i32 signext %1, i32* nocapture readonly %2, i32* nocapture %3, i32 signext %4, i32* nocapture readonly %5, i32* nocapture readonly %6, i32* nocapture readonly %7, i32* nocapture readonly %8, i32* nocapture readonly %9, i32* nocapture readonly %10, i32* nocapture readonly %11, i32* nocapture readonly %12, i32* nocapture readonly %13, i32* nocapture readonly %14, i32* nocapture readonly %15, i32* nocapture readonly %16, i32* nocapture readonly %17, i32* nocapture readonly %18, i32* nocapture readonly %19, i32* nocapture readonly %20, i32* nocapture readonly %21, i32* nocapture readonly %22, i32* nocapture readonly %23, i32* nocapture readonly %24, i32* nocapture readonly %25, i32* nocapture readonly %26, i32* nocapture readonly %27, i32* nocapture readonly %28, i32* nocapture readonly %29, i32* nocapture readonly %30, i32* nocapture readonly %31, i32* nocapture readonly %32, i32* nocapture readonly %33, i32* nocapture readonly %34, i32* nocapture readonly %35, i32* nocapture readonly %36) local_unnamed_addr #0 {
    ; Entry (%37): enter the loop only when the trip count %4 is positive.
    %38 = icmp sgt i32 %4, 0
    br i1 %38, label %39, label %41

  39:                                               ; preds = %37
    ; Loop preheader: zero-extend the trip count, pre-decrement the %2, %5
    ; and %6 element pointers (each is advanced by 4 bytes before every use
    ; in the loop header), and prime the PowerPC hardware loop counter.
    %40 = zext i32 %4 to i64
    %scevgep = getelementptr i32, i32* %2, i64 -1
    %scevgep69 = bitcast i32* %scevgep to i8*
    %scevgep70 = getelementptr i32, i32* %5, i64 -1
    %scevgep7071 = bitcast i32* %scevgep70 to i8*
    %scevgep72 = getelementptr i32, i32* %6, i64 -1
    %scevgep7273 = bitcast i32* %scevgep72 to i8*
    call void @llvm.set.loop.iterations.i64(i64 %40)
    br label %42

  41:                                               ; preds = %65, %37
    ; Function exit; the returned value is unused (undef).
    ret i32 undef

  42:                                               ; preds = %65, %39
    ; Loop header: step the three pre-decremented pointers by 4 bytes,
    ; compute %56 = %43 mod 30 via udiv/mul/sub, accumulate the running
    ; sum %58 from the load through %53, then dispatch on argument %0.
    %lsr.iv = phi i64 [ %lsr.iv.next, %65 ], [ 0, %39 ]
    %43 = phi i64 [ 0, %39 ], [ %163, %65 ]
    %44 = phi i32 [ 0, %39 ], [ %58, %65 ]
    %45 = phi i8* [ %scevgep69, %39 ], [ %52, %65 ]
    %46 = phi i8* [ %scevgep7071, %39 ], [ %50, %65 ]
    %47 = phi i8* [ %scevgep7273, %39 ], [ %48, %65 ]
    %48 = getelementptr i8, i8* %47, i64 4
    %49 = bitcast i8* %48 to i32*
    %50 = getelementptr i8, i8* %46, i64 4
    %51 = bitcast i8* %50 to i32*
    %52 = getelementptr i8, i8* %45, i64 4
    %53 = bitcast i8* %52 to i32*
    %lsr68 = trunc i64 %43 to i32
    %54 = udiv i32 %lsr68, 30
    %55 = mul nuw nsw i32 %54, 30
    %56 = sub i32 %lsr68, %55
    %57 = load i32, i32* %53, align 4, !tbaa !2
    %58 = add nsw i32 %57, %44
    switch i32 %0, label %64 [
      i32 1, label %59
      i32 3, label %62
    ]

  59:                                               ; preds = %42
    ; Switch arm %0 == 1: %61 = 2 * (trunc %43).
    %60 = trunc i64 %43 to i32
    %61 = shl i32 %60, 1
    br label %65

  62:                                               ; preds = %42
    ; Switch arm %0 == 3: %63 = %lsr68 + 100.
    %63 = add nuw nsw i32 %lsr68, 100
    br label %65

  64:                                               ; preds = %42
    ; Default arm: falls through carrying %56 (%43 mod 30) via the phi in %65.
    br label %65

  65:                                               ; preds = %64, %62, %59
    ; Loop latch: select the switch arm's value, then add loads from the 30
    ; remaining pointer arguments (all indexed by the byte offset %lsr.iv),
    ; store the total into %3, and decrement the hardware loop counter.
    ; The long chain of live values here is what creates the high register
    ; pressure this test relies on.
    %66 = phi i32 [ %56, %64 ], [ %63, %62 ], [ %61, %59 ]
    %67 = bitcast i32* %7 to i8*
    %68 = bitcast i32* %8 to i8*
    %69 = bitcast i32* %9 to i8*
    %70 = bitcast i32* %10 to i8*
    %71 = bitcast i32* %11 to i8*
    %72 = bitcast i32* %12 to i8*
    %73 = bitcast i32* %13 to i8*
    %74 = bitcast i32* %14 to i8*
    %75 = bitcast i32* %15 to i8*
    %76 = bitcast i32* %16 to i8*
    %77 = bitcast i32* %17 to i8*
    %78 = bitcast i32* %18 to i8*
    %79 = bitcast i32* %19 to i8*
    %80 = bitcast i32* %20 to i8*
    %81 = bitcast i32* %21 to i8*
    %82 = bitcast i32* %22 to i8*
    %83 = bitcast i32* %23 to i8*
    %84 = bitcast i32* %24 to i8*
    %85 = bitcast i32* %25 to i8*
    %86 = bitcast i32* %26 to i8*
    %87 = bitcast i32* %27 to i8*
    %88 = bitcast i32* %28 to i8*
    %89 = bitcast i32* %29 to i8*
    %90 = bitcast i32* %30 to i8*
    %91 = bitcast i32* %31 to i8*
    %92 = bitcast i32* %32 to i8*
    %93 = bitcast i32* %33 to i8*
    %94 = bitcast i32* %34 to i8*
    %95 = bitcast i32* %35 to i8*
    %96 = bitcast i32* %36 to i8*
    %97 = bitcast i32* %3 to i8*
    %98 = add nsw i32 %66, %58
    %99 = load i32, i32* %51, align 4, !tbaa !2
    %100 = add nsw i32 %98, %99
    %101 = load i32, i32* %49, align 4, !tbaa !2
    %102 = add nsw i32 %100, %101
    %uglygep60 = getelementptr i8, i8* %67, i64 %lsr.iv
    %uglygep6061 = bitcast i8* %uglygep60 to i32*
    %103 = load i32, i32* %uglygep6061, align 4, !tbaa !2
    %104 = add nsw i32 %102, %103
    %uglygep58 = getelementptr i8, i8* %68, i64 %lsr.iv
    %uglygep5859 = bitcast i8* %uglygep58 to i32*
    %105 = load i32, i32* %uglygep5859, align 4, !tbaa !2
    %106 = add nsw i32 %104, %105
    %uglygep56 = getelementptr i8, i8* %69, i64 %lsr.iv
    %uglygep5657 = bitcast i8* %uglygep56 to i32*
    %107 = load i32, i32* %uglygep5657, align 4, !tbaa !2
    %108 = add nsw i32 %106, %107
    %uglygep54 = getelementptr i8, i8* %70, i64 %lsr.iv
    %uglygep5455 = bitcast i8* %uglygep54 to i32*
    %109 = load i32, i32* %uglygep5455, align 4, !tbaa !2
    %110 = add nsw i32 %108, %109
    %uglygep52 = getelementptr i8, i8* %71, i64 %lsr.iv
    %uglygep5253 = bitcast i8* %uglygep52 to i32*
    %111 = load i32, i32* %uglygep5253, align 4, !tbaa !2
    %112 = add nsw i32 %110, %111
    %uglygep50 = getelementptr i8, i8* %72, i64 %lsr.iv
    %uglygep5051 = bitcast i8* %uglygep50 to i32*
    %113 = load i32, i32* %uglygep5051, align 4, !tbaa !2
    %114 = add nsw i32 %112, %113
    %uglygep48 = getelementptr i8, i8* %73, i64 %lsr.iv
    %uglygep4849 = bitcast i8* %uglygep48 to i32*
    %115 = load i32, i32* %uglygep4849, align 4, !tbaa !2
    %116 = add nsw i32 %114, %115
    %uglygep46 = getelementptr i8, i8* %74, i64 %lsr.iv
    %uglygep4647 = bitcast i8* %uglygep46 to i32*
    %117 = load i32, i32* %uglygep4647, align 4, !tbaa !2
    %118 = add nsw i32 %116, %117
    %uglygep44 = getelementptr i8, i8* %75, i64 %lsr.iv
    %uglygep4445 = bitcast i8* %uglygep44 to i32*
    %119 = load i32, i32* %uglygep4445, align 4, !tbaa !2
    %120 = add nsw i32 %118, %119
    %uglygep42 = getelementptr i8, i8* %76, i64 %lsr.iv
    %uglygep4243 = bitcast i8* %uglygep42 to i32*
    %121 = load i32, i32* %uglygep4243, align 4, !tbaa !2
    %122 = add nsw i32 %120, %121
    %uglygep40 = getelementptr i8, i8* %77, i64 %lsr.iv
    %uglygep4041 = bitcast i8* %uglygep40 to i32*
    %123 = load i32, i32* %uglygep4041, align 4, !tbaa !2
    %124 = add nsw i32 %122, %123
    %uglygep38 = getelementptr i8, i8* %78, i64 %lsr.iv
    %uglygep3839 = bitcast i8* %uglygep38 to i32*
    %125 = load i32, i32* %uglygep3839, align 4, !tbaa !2
    %126 = add nsw i32 %124, %125
    %uglygep36 = getelementptr i8, i8* %79, i64 %lsr.iv
    %uglygep3637 = bitcast i8* %uglygep36 to i32*
    %127 = load i32, i32* %uglygep3637, align 4, !tbaa !2
    %128 = add nsw i32 %126, %127
    %uglygep34 = getelementptr i8, i8* %80, i64 %lsr.iv
    %uglygep3435 = bitcast i8* %uglygep34 to i32*
    %129 = load i32, i32* %uglygep3435, align 4, !tbaa !2
    %130 = add nsw i32 %128, %129
    %uglygep32 = getelementptr i8, i8* %81, i64 %lsr.iv
    %uglygep3233 = bitcast i8* %uglygep32 to i32*
    %131 = load i32, i32* %uglygep3233, align 4, !tbaa !2
    %132 = add nsw i32 %130, %131
    %uglygep30 = getelementptr i8, i8* %82, i64 %lsr.iv
    %uglygep3031 = bitcast i8* %uglygep30 to i32*
    %133 = load i32, i32* %uglygep3031, align 4, !tbaa !2
    %134 = add nsw i32 %132, %133
    %uglygep28 = getelementptr i8, i8* %83, i64 %lsr.iv
    %uglygep2829 = bitcast i8* %uglygep28 to i32*
    %135 = load i32, i32* %uglygep2829, align 4, !tbaa !2
    %136 = add nsw i32 %134, %135
    %uglygep26 = getelementptr i8, i8* %84, i64 %lsr.iv
    %uglygep2627 = bitcast i8* %uglygep26 to i32*
    %137 = load i32, i32* %uglygep2627, align 4, !tbaa !2
    %138 = add nsw i32 %136, %137
    %uglygep24 = getelementptr i8, i8* %85, i64 %lsr.iv
    %uglygep2425 = bitcast i8* %uglygep24 to i32*
    %139 = load i32, i32* %uglygep2425, align 4, !tbaa !2
    %140 = add nsw i32 %138, %139
    %uglygep22 = getelementptr i8, i8* %86, i64 %lsr.iv
    %uglygep2223 = bitcast i8* %uglygep22 to i32*
    %141 = load i32, i32* %uglygep2223, align 4, !tbaa !2
    %142 = add nsw i32 %140, %141
    %uglygep20 = getelementptr i8, i8* %87, i64 %lsr.iv
    %uglygep2021 = bitcast i8* %uglygep20 to i32*
    %143 = load i32, i32* %uglygep2021, align 4, !tbaa !2
    %144 = add nsw i32 %142, %143
    %uglygep18 = getelementptr i8, i8* %88, i64 %lsr.iv
    %uglygep1819 = bitcast i8* %uglygep18 to i32*
    %145 = load i32, i32* %uglygep1819, align 4, !tbaa !2
    %146 = add nsw i32 %144, %145
    %uglygep16 = getelementptr i8, i8* %89, i64 %lsr.iv
    %uglygep1617 = bitcast i8* %uglygep16 to i32*
    %147 = load i32, i32* %uglygep1617, align 4, !tbaa !2
    %148 = add nsw i32 %146, %147
    %uglygep14 = getelementptr i8, i8* %90, i64 %lsr.iv
    %uglygep1415 = bitcast i8* %uglygep14 to i32*
    %149 = load i32, i32* %uglygep1415, align 4, !tbaa !2
    %150 = add nsw i32 %148, %149
    %uglygep12 = getelementptr i8, i8* %91, i64 %lsr.iv
    %uglygep1213 = bitcast i8* %uglygep12 to i32*
    %151 = load i32, i32* %uglygep1213, align 4, !tbaa !2
    %152 = add nsw i32 %150, %151
    %uglygep10 = getelementptr i8, i8* %92, i64 %lsr.iv
    %uglygep1011 = bitcast i8* %uglygep10 to i32*
    %153 = load i32, i32* %uglygep1011, align 4, !tbaa !2
    %154 = add nsw i32 %152, %153
    %uglygep8 = getelementptr i8, i8* %93, i64 %lsr.iv
    %uglygep89 = bitcast i8* %uglygep8 to i32*
    %155 = load i32, i32* %uglygep89, align 4, !tbaa !2
    %156 = add nsw i32 %154, %155
    %uglygep6 = getelementptr i8, i8* %94, i64 %lsr.iv
    %uglygep67 = bitcast i8* %uglygep6 to i32*
    %157 = load i32, i32* %uglygep67, align 4, !tbaa !2
    %158 = add nsw i32 %156, %157
    %uglygep4 = getelementptr i8, i8* %95, i64 %lsr.iv
    %uglygep45 = bitcast i8* %uglygep4 to i32*
    %159 = load i32, i32* %uglygep45, align 4, !tbaa !2
    %160 = add nsw i32 %158, %159
    %uglygep2 = getelementptr i8, i8* %96, i64 %lsr.iv
    %uglygep23 = bitcast i8* %uglygep2 to i32*
    %161 = load i32, i32* %uglygep23, align 4, !tbaa !2
    %162 = add nsw i32 %160, %161
    %uglygep = getelementptr i8, i8* %97, i64 %lsr.iv
    %uglygep1 = bitcast i8* %uglygep to i32*
    store i32 %162, i32* %uglygep1, align 4, !tbaa !2
    %163 = add nuw nsw i64 %43, 1
    %lsr.iv.next = add nuw nsw i64 %lsr.iv, 4
    %164 = call i1 @llvm.loop.decrement.i64(i64 1)
    br i1 %164, label %42, label %41
  }

  ; Function Attrs: noduplicate nounwind
  declare void @llvm.set.loop.iterations.i64(i64) #1

  ; Function Attrs: noduplicate nounwind
  declare i1 @llvm.loop.decrement.i64(i64) #1

  attributes #0 = { nofree norecurse nounwind "correctly-rounded-divide-sqrt-fp-math"="false" "disable-tail-calls"="false" "frame-pointer"="none" "less-precise-fpmad"="false" "min-legal-vector-width"="0" "no-infs-fp-math"="false" "no-jump-tables"="false" "no-nans-fp-math"="false" "no-signed-zeros-fp-math"="false" "no-trapping-math"="true" "stack-protector-buffer-size"="8" "target-cpu"="ppc64le" "target-features"="+altivec,+bpermd,+crypto,+direct-move,+extdiv,+htm,+power8-vector,+vsx,-power9-vector,-spe" "unsafe-fp-math"="false" "use-soft-float"="false" }
  attributes #1 = { noduplicate nounwind }

  !llvm.module.flags = !{!0}
  !llvm.ident = !{!1}

  !0 = !{i32 1, !"wchar_size", i32 4}
  !1 = !{!"clang version 12.0.0"}
  !2 = !{!3, !3, i64 0}
  !3 = !{!"int", !4, i64 0}
  !4 = !{!"omnipotent char", !5, i64 0}
  !5 = !{!"Simple C/C++ TBAA"}

...
---
name:            foo
alignment:       16
tracksRegLiveness: true
registers:
  - { id: 0, class: g8rc }
  - { id: 1, class: g8rc }
  - { id: 2, class: g8rc }
  - { id: 3, class: g8rc_and_g8rc_nox0 }
  - { id: 4, class: g8rc_and_g8rc_nox0 }
  - { id: 5, class: gprc }
  - { id: 6, class: g8rc_and_g8rc_nox0 }
  - { id: 7, class: g8rc_and_g8rc_nox0 }
  - { id: 8, class: g8rc_and_g8rc_nox0 }
  - { id: 9, class: g8rc }
  - { id: 10, class: g8rc_and_g8rc_nox0 }
  - { id: 11, class: g8rc }
  - { id: 12, class: g8rc_and_g8rc_nox0 }
  - { id: 13, class: g8rc }
  - { id: 14, class: gprc_and_gprc_nor0 }
  - { id: 15, class: gprc }
  - { id: 16, class: gprc }
  - { id: 17, class: gprc }
  - { id: 18, class: gprc }
  - { id: 19, class: gprc }
  - { id: 20, class: g8rc }
  - { id: 21, class: g8rc }
  - { id: 22, class: g8rc }
  - { id: 23, class: g8rc }
  - { id: 24, class: g8rc_and_g8rc_nox0 }
  - { id: 25, class: g8rc_and_g8rc_nox0 }
  - { id: 26, class: g8rc }
  - { id: 27, class: g8rc_and_g8rc_nox0 }
  - { id: 28, class: g8rc_and_g8rc_nox0 }
  - { id: 29, class: g8rc_and_g8rc_nox0 }
  - { id: 30, class: gprc }
  - { id: 31, class: gprc }
  - { id: 32, class: g8rc_and_g8rc_nox0 }
  - { id: 33, class: g8rc_and_g8rc_nox0 }
  - { id: 34, class: g8rc_and_g8rc_nox0 }
  - { id: 35, class: g8rc_and_g8rc_nox0 }
  - { id: 36, class: g8rc_and_g8rc_nox0 }
  - { id: 37, class: g8rc_and_g8rc_nox0 }
  - { id: 38, class: g8rc_and_g8rc_nox0 }
  - { id: 39, class: g8rc_and_g8rc_nox0 }
  - { id: 40, class: g8rc_and_g8rc_nox0 }
  - { id: 41, class: g8rc_and_g8rc_nox0 }
  - { id: 42, class: g8rc_and_g8rc_nox0 }
  - { id: 43, class: g8rc_and_g8rc_nox0 }
  - { id: 44, class: g8rc_and_g8rc_nox0 }
  - { id: 45, class: g8rc_and_g8rc_nox0 }
  - { id: 46, class: g8rc_and_g8rc_nox0 }
  - { id: 47, class: g8rc_and_g8rc_nox0 }
  - { id: 48, class: g8rc_and_g8rc_nox0 }
  - { id: 49, class: g8rc_and_g8rc_nox0 }
  - { id: 50, class: g8rc_and_g8rc_nox0 }
  - { id: 51, class: g8rc_and_g8rc_nox0 }
  - { id: 52, class: g8rc_and_g8rc_nox0 }
  - { id: 53, class: g8rc_and_g8rc_nox0 }
  - { id: 54, class: g8rc_and_g8rc_nox0 }
  - { id: 55, class: g8rc_and_g8rc_nox0 }
  - { id: 56, class: g8rc_and_g8rc_nox0 }
  - { id: 57, class: g8rc_and_g8rc_nox0 }
  - { id: 58, class: g8rc_and_g8rc_nox0 }
  - { id: 59, class: g8rc_and_g8rc_nox0 }
  - { id: 60, class: g8rc_and_g8rc_nox0 }
  - { id: 61, class: crrc }
  - { id: 62, class: g8rc }
  - { id: 63, class: gprc }
  - { id: 64, class: g8rc }
  - { id: 65, class: g8rc }
  - { id: 66, class: g8rc }
  - { id: 67, class: gprc }
  - { id: 68, class: g8rc_and_g8rc_nox0 }
  - { id: 69, class: gprc }
  - { id: 70, class: gprc }
  - { id: 71, class: gprc }
  - { id: 72, class: gprc }
  - { id: 73, class: gprc }
  - { id: 74, class: crrc }
  - { id: 75, class: crrc }
  - { id: 76, class: gprc }
  - { id: 77, class: gprc }
  - { id: 78, class: gprc }
  - { id: 79, class: gprc }
  - { id: 80, class: gprc }
  - { id: 81, class: gprc }
  - { id: 82, class: gprc }
  - { id: 83, class: gprc }
  - { id: 84, class: gprc }
  - { id: 85, class: gprc }
  - { id: 86, class: gprc }
  - { id: 87, class: gprc }
  - { id: 88, class: gprc }
  - { id: 89, class: gprc }
  - { id: 90, class: gprc }
  - { id: 91, class: gprc }
  - { id: 92, class: gprc }
  - { id: 93, class: gprc }
  - { id: 94, class: gprc }
  - { id: 95, class: gprc }
  - { id: 96, class: gprc }
  - { id: 97, class: gprc }
  - { id: 98, class: gprc }
  - { id: 99, class: gprc }
  - { id: 100, class: gprc }
  - { id: 101, class: gprc }
  - { id: 102, class: gprc }
  - { id: 103, class: gprc }
  - { id: 104, class: gprc }
  - { id: 105, class: gprc }
  - { id: 106, class: gprc }
  - { id: 107, class: gprc }
  - { id: 108, class: gprc }
  - { id: 109, class: gprc }
  - { id: 110, class: gprc }
  - { id: 111, class: gprc }
  - { id: 112, class: gprc }
  - { id: 113, class: gprc }
  - { id: 114, class: gprc }
  - { id: 115, class: gprc }
  - { id: 116, class: gprc }
  - { id: 117, class: gprc }
  - { id: 118, class: gprc }
  - { id: 119, class: gprc }
  - { id: 120, class: gprc }
  - { id: 121, class: gprc }
  - { id: 122, class: gprc }
  - { id: 123, class: gprc }
  - { id: 124, class: gprc }
  - { id: 125, class: gprc }
  - { id: 126, class: gprc }
  - { id: 127, class: gprc }
  - { id: 128, class: gprc }
  - { id: 129, class: gprc }
  - { id: 130, class: gprc }
  - { id: 131, class: gprc }
  - { id: 132, class: gprc }
  - { id: 133, class: gprc }
  - { id: 134, class: gprc }
  - { id: 135, class: gprc }
  - { id: 136, class: gprc }
  - { id: 137, class: gprc }
  - { id: 138, class: gprc }
  - { id: 139, class: gprc }
  - { id: 140, class: gprc }
  - { id: 141, class: gprc }
  - { id: 142, class: g8rc }
liveins:
  - { reg: '$x3', virtual-reg: '%22' }
  - { reg: '$x5', virtual-reg: '%24' }
  - { reg: '$x6', virtual-reg: '%25' }
  - { reg: '$x7', virtual-reg: '%26' }
  - { reg: '$x8', virtual-reg: '%27' }
  - { reg: '$x9', virtual-reg: '%28' }
  - { reg: '$x10', virtual-reg: '%29' }
frameInfo:
  maxAlignment:    1
fixedStack:
  - { id: 0, offset: 320, size: 8, alignment: 16, isImmutable: true }
  - { id: 1, offset: 312, size: 8, alignment: 8, isImmutable: true }
  - { id: 2, offset: 304, size: 8, alignment: 16, isImmutable: true }
  - { id: 3, offset: 296, size: 8, alignment: 8, isImmutable: true }
  - { id: 4, offset: 288, size: 8, alignment: 16, isImmutable: true }
  - { id: 5, offset: 280, size: 8, alignment: 8, isImmutable: true }
  - { id: 6, offset: 272, size: 8, alignment: 16, isImmutable: true }
  - { id: 7, offset: 264, size: 8, alignment: 8, isImmutable: true }
  - { id: 8, offset: 256, size: 8, alignment: 16, isImmutable: true }
  - { id: 9, offset: 248, size: 8, alignment: 8, isImmutable: true }
  - { id: 10, offset: 240, size: 8, alignment: 16, isImmutable: true }
  - { id: 11, offset: 232, size: 8, alignment: 8, isImmutable: true }
  - { id: 12, offset: 224, size: 8, alignment: 16, isImmutable: true }
  - { id: 13, offset: 216, size: 8, alignment: 8, isImmutable: true }
  - { id: 14, offset: 208, size: 8, alignment: 16, isImmutable: true }
  - { id: 15, offset: 200, size: 8, alignment: 8, isImmutable: true }
  - { id: 16, offset: 192, size: 8, alignment: 16, isImmutable: true }
  - { id: 17, offset: 184, size: 8, alignment: 8, isImmutable: true }
  - { id: 18, offset: 176, size: 8, alignment: 16, isImmutable: true }
  - { id: 19, offset: 168, size: 8, alignment: 8, isImmutable: true }
  - { id: 20, offset: 160, size: 8, alignment: 16, isImmutable: true }
  - { id: 21, offset: 152, size: 8, alignment: 8, isImmutable: true }
  - { id: 22, offset: 144, size: 8, alignment: 16, isImmutable: true }
  - { id: 23, offset: 136, size: 8, alignment: 8, isImmutable: true }
  - { id: 24, offset: 128, size: 8, alignment: 16, isImmutable: true }
  - { id: 25, offset: 120, size: 8, alignment: 8, isImmutable: true }
  - { id: 26, offset: 112, size: 8, alignment: 16, isImmutable: true }
  - { id: 27, offset: 104, size: 8, alignment: 8, isImmutable: true }
  - { id: 28, offset: 96, size: 8, alignment: 16, isImmutable: true }
machineFunctionInfo: {}
body:             |
  ; CHECK-LABEL: name: foo
  ; CHECK: bb.0 (%ir-block.37):
  ; CHECK:   successors: %bb.1(0x50000000), %bb.2(0x30000000)
  ; CHECK:   liveins: $x3, $x5, $x6, $x7, $x8, $x9, $x10
  ; CHECK:   [[COPY:%[0-9]+]]:g8rc_and_g8rc_nox0 = COPY $x10
  ; CHECK:   [[COPY1:%[0-9]+]]:g8rc_and_g8rc_nox0 = COPY $x9
  ; CHECK:   [[COPY2:%[0-9]+]]:g8rc_and_g8rc_nox0 = COPY $x8
  ; CHECK:   [[COPY3:%[0-9]+]]:g8rc = COPY $x7
  ; CHECK:   [[COPY4:%[0-9]+]]:g8rc_and_g8rc_nox0 = COPY $x6
  ; CHECK:   [[COPY5:%[0-9]+]]:g8rc_and_g8rc_nox0 = COPY $x5
  ; CHECK:   [[COPY6:%[0-9]+]]:g8rc = COPY $x3
  ; CHECK:   [[COPY7:%[0-9]+]]:gprc = COPY [[COPY3]].sub_32
  ; CHECK:   [[CMPWI:%[0-9]+]]:crrc = CMPWI [[COPY7]], 1
  ; CHECK:   BCC 12, killed [[CMPWI]], %bb.2
  ; CHECK:   B %bb.1
  ; CHECK: bb.1 (%ir-block.39):
  ; CHECK:   successors: %bb.3(0x80000000)
  ; CHECK:   [[COPY8:%[0-9]+]]:gprc = COPY [[COPY6]].sub_32
  ; CHECK:   [[LD:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.28 :: (load (s64) from %fixed-stack.28, align 16)
  ; CHECK:   [[LD1:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.27 :: (load (s64) from %fixed-stack.27)
  ; CHECK:   [[LD2:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.26 :: (load (s64) from %fixed-stack.26, align 16)
  ; CHECK:   [[LD3:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.25 :: (load (s64) from %fixed-stack.25)
  ; CHECK:   [[LD4:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.24 :: (load (s64) from %fixed-stack.24, align 16)
  ; CHECK:   [[LD5:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.23 :: (load (s64) from %fixed-stack.23)
  ; CHECK:   [[LD6:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.22 :: (load (s64) from %fixed-stack.22, align 16)
  ; CHECK:   [[LD7:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.21 :: (load (s64) from %fixed-stack.21)
  ; CHECK:   [[LD8:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.20 :: (load (s64) from %fixed-stack.20, align 16)
  ; CHECK:   [[LD9:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.19 :: (load (s64) from %fixed-stack.19)
  ; CHECK:   [[LD10:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.18 :: (load (s64) from %fixed-stack.18, align 16)
  ; CHECK:   [[LD11:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.17 :: (load (s64) from %fixed-stack.17)
  ; CHECK:   [[LD12:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.16 :: (load (s64) from %fixed-stack.16, align 16)
  ; CHECK:   [[LD13:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.15 :: (load (s64) from %fixed-stack.15)
  ; CHECK:   [[LD14:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.14 :: (load (s64) from %fixed-stack.14, align 16)
  ; CHECK:   [[LD15:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.13 :: (load (s64) from %fixed-stack.13)
  ; CHECK:   [[LD16:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.12 :: (load (s64) from %fixed-stack.12, align 16)
  ; CHECK:   [[LD17:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.11 :: (load (s64) from %fixed-stack.11)
  ; CHECK:   [[LD18:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.10 :: (load (s64) from %fixed-stack.10, align 16)
  ; CHECK:   [[LD19:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.9 :: (load (s64) from %fixed-stack.9)
  ; CHECK:   [[LD20:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.8 :: (load (s64) from %fixed-stack.8, align 16)
  ; CHECK:   [[LD21:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.7 :: (load (s64) from %fixed-stack.7)
  ; CHECK:   [[LD22:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.6 :: (load (s64) from %fixed-stack.6, align 16)
  ; CHECK:   [[LD23:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.5 :: (load (s64) from %fixed-stack.5)
  ; CHECK:   [[LD24:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.4 :: (load (s64) from %fixed-stack.4, align 16)
  ; CHECK:   [[LD25:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.3 :: (load (s64) from %fixed-stack.3)
  ; CHECK:   [[LD26:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.2 :: (load (s64) from %fixed-stack.2, align 16)
  ; CHECK:   [[LD27:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.1 :: (load (s64) from %fixed-stack.1)
  ; CHECK:   [[LD28:%[0-9]+]]:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.0 :: (load (s64) from %fixed-stack.0, align 16)
  ; CHECK:   [[DEF:%[0-9]+]]:g8rc = IMPLICIT_DEF
  ; CHECK:   [[INSERT_SUBREG:%[0-9]+]]:g8rc = INSERT_SUBREG [[DEF]], [[COPY7]], %subreg.sub_32
  ; CHECK:   [[RLDICL:%[0-9]+]]:g8rc = RLDICL killed [[INSERT_SUBREG]], 0, 32
  ; CHECK:   [[ADDI8_:%[0-9]+]]:g8rc = ADDI8 [[COPY5]], -4
  ; CHECK:   [[ADDI8_1:%[0-9]+]]:g8rc = ADDI8 [[COPY2]], -4
  ; CHECK:   [[ADDI8_2:%[0-9]+]]:g8rc = ADDI8 [[COPY1]], -4
  ; CHECK:   MTCTR8loop killed [[RLDICL]], implicit-def dead $ctr8
  ; CHECK:   [[LI:%[0-9]+]]:gprc = LI 0
  ; CHECK:   [[LI8_:%[0-9]+]]:g8rc = LI8 0
  ; CHECK:   [[LIS:%[0-9]+]]:gprc = LIS 34952
  ; CHECK:   [[ORI:%[0-9]+]]:gprc = ORI [[LIS]], 34953
  ; CHECK:   [[CMPLWI:%[0-9]+]]:crrc = CMPLWI [[COPY8]], 3
  ; CHECK:   [[CMPLWI1:%[0-9]+]]:crrc = CMPLWI [[COPY8]], 1
  ; CHECK:   B %bb.3
  ; CHECK: bb.2 (%ir-block.41):
  ; CHECK:   [[LI8_1:%[0-9]+]]:g8rc = LI8 0
  ; CHECK:   $x3 = COPY [[LI8_1]]
  ; CHECK:   BLR8 implicit $lr8, implicit $rm, implicit $x3
  ; CHECK: bb.3 (%ir-block.42):
  ; CHECK:   successors: %bb.6(0x2aaaaaab), %bb.4(0x55555555)
  ; CHECK:   [[PHI:%[0-9]+]]:g8rc_and_g8rc_nox0 = PHI [[LI8_]], %bb.1, %21, %bb.8
  ; CHECK:   [[PHI1:%[0-9]+]]:g8rc_and_g8rc_nox0 = PHI [[LI8_]], %bb.1, %20, %bb.8
  ; CHECK:   [[PHI2:%[0-9]+]]:gprc = PHI [[LI]], %bb.1, %16, %bb.8
  ; CHECK:   [[PHI3:%[0-9]+]]:g8rc_and_g8rc_nox0 = PHI [[ADDI8_]], %bb.1, %13, %bb.8
  ; CHECK:   [[PHI4:%[0-9]+]]:g8rc_and_g8rc_nox0 = PHI [[ADDI8_1]], %bb.1, %11, %bb.8
  ; CHECK:   [[PHI5:%[0-9]+]]:g8rc_and_g8rc_nox0 = PHI [[ADDI8_2]], %bb.1, %9, %bb.8
  ; CHECK:   [[LWZU:%[0-9]+]]:gprc, [[LWZU1:%[0-9]+]]:g8rc_and_g8rc_nox0 = LWZU 4, [[PHI3]] :: (load (s32) from %ir.53, !tbaa !2)
  ; CHECK:   [[COPY9:%[0-9]+]]:gprc_and_gprc_nor0 = COPY [[PHI1]].sub_32
  ; CHECK:   [[ADD4_:%[0-9]+]]:gprc = nsw ADD4 killed [[LWZU]], [[PHI2]]
  ; CHECK:   BCC 76, [[CMPLWI]], %bb.6
  ; CHECK:   B %bb.4
  ; CHECK: bb.4 (%ir-block.42):
  ; CHECK:   successors: %bb.5(0x40000001), %bb.7(0x3fffffff)
  ; CHECK:   BCC 68, [[CMPLWI1]], %bb.7
  ; CHECK:   B %bb.5
  ; CHECK: bb.5 (%ir-block.59):
  ; CHECK:   successors: %bb.8(0x80000000)
  ; CHECK:   [[COPY10:%[0-9]+]]:gprc = COPY [[PHI1]].sub_32
  ; CHECK:   [[RLWINM:%[0-9]+]]:gprc = RLWINM [[COPY10]], 1, 0, 30
  ; CHECK:   B %bb.8
  ; CHECK: bb.6 (%ir-block.62):
  ; CHECK:   successors: %bb.8(0x80000000)
  ; CHECK:   [[ADDI:%[0-9]+]]:gprc = nuw nsw ADDI [[COPY9]], 100
  ; CHECK:   B %bb.8
  ; CHECK: bb.7 (%ir-block.64):
  ; CHECK:   successors: %bb.8(0x80000000)
  ; CHECK:   [[MULHWU:%[0-9]+]]:gprc = MULHWU [[COPY9]], [[ORI]]
  ; CHECK:   [[RLWINM1:%[0-9]+]]:gprc = RLWINM [[MULHWU]], 28, 4, 31
  ; CHECK:   [[MULLI:%[0-9]+]]:gprc = nuw nsw MULLI [[RLWINM1]], 30
  ; CHECK:   [[SUBF:%[0-9]+]]:gprc = SUBF [[MULLI]], [[COPY9]]
  ; CHECK: bb.8 (%ir-block.65):
  ; CHECK:   successors: %bb.3(0x7c000000), %bb.2(0x04000000)
  ; CHECK:   [[PHI6:%[0-9]+]]:gprc = PHI [[ADDI]], %bb.6, [[RLWINM]], %bb.5, [[SUBF]], %bb.7
  ; CHECK:   [[ADDI8_3:%[0-9]+]]:g8rc_and_g8rc_nox0 = ADDI8 [[PHI5]], 4
  ; CHECK:   [[COPY11:%[0-9]+]]:g8rc = COPY [[ADDI8_3]]
  ; CHECK:   [[ADDI8_4:%[0-9]+]]:g8rc_and_g8rc_nox0 = ADDI8 [[PHI4]], 4
  ; CHECK:   [[COPY12:%[0-9]+]]:g8rc = COPY [[ADDI8_4]]
  ; CHECK:   [[COPY13:%[0-9]+]]:g8rc = COPY [[LWZU1]]
  ; CHECK:   [[ADD4_1:%[0-9]+]]:gprc = nsw ADD4 [[PHI6]], [[ADD4_]]
  ; CHECK:   [[LWZ:%[0-9]+]]:gprc = LWZ 0, [[ADDI8_4]] :: (load (s32) from %ir.51, !tbaa !2)
  ; CHECK:   [[ADD4_2:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_1]], killed [[LWZ]]
  ; CHECK:   [[LWZ1:%[0-9]+]]:gprc = LWZ 0, [[ADDI8_3]] :: (load (s32) from %ir.49, !tbaa !2)
  ; CHECK:   [[ADD4_3:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_2]], killed [[LWZ1]]
  ; CHECK:   [[LWZX:%[0-9]+]]:gprc = LWZX [[COPY]], [[PHI]] :: (load (s32) from %ir.uglygep6061, !tbaa !2)
  ; CHECK:   [[ADD4_4:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_3]], killed [[LWZX]]
  ; CHECK:   [[LWZX1:%[0-9]+]]:gprc = LWZX [[LD28]], [[PHI]] :: (load (s32) from %ir.uglygep5859, !tbaa !2)
  ; CHECK:   [[ADD4_5:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_4]], killed [[LWZX1]]
  ; CHECK:   [[LWZX2:%[0-9]+]]:gprc = LWZX [[LD27]], [[PHI]] :: (load (s32) from %ir.uglygep5657, !tbaa !2)
  ; CHECK:   [[ADD4_6:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_5]], killed [[LWZX2]]
  ; CHECK:   [[LWZX3:%[0-9]+]]:gprc = LWZX [[LD26]], [[PHI]] :: (load (s32) from %ir.uglygep5455, !tbaa !2)
  ; CHECK:   [[ADD4_7:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_6]], killed [[LWZX3]]
  ; CHECK:   [[LWZX4:%[0-9]+]]:gprc = LWZX [[LD25]], [[PHI]] :: (load (s32) from %ir.uglygep5253, !tbaa !2)
  ; CHECK:   [[ADD4_8:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_7]], killed [[LWZX4]]
  ; CHECK:   [[LWZX5:%[0-9]+]]:gprc = LWZX [[LD24]], [[PHI]] :: (load (s32) from %ir.uglygep5051, !tbaa !2)
  ; CHECK:   [[ADD4_9:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_8]], killed [[LWZX5]]
  ; CHECK:   [[LWZX6:%[0-9]+]]:gprc = LWZX [[LD23]], [[PHI]] :: (load (s32) from %ir.uglygep4849, !tbaa !2)
  ; CHECK:   [[ADD4_10:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_9]], killed [[LWZX6]]
  ; CHECK:   [[LWZX7:%[0-9]+]]:gprc = LWZX [[LD22]], [[PHI]] :: (load (s32) from %ir.uglygep4647, !tbaa !2)
  ; CHECK:   [[ADD4_11:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_10]], killed [[LWZX7]]
  ; CHECK:   [[LWZX8:%[0-9]+]]:gprc = LWZX [[LD21]], [[PHI]] :: (load (s32) from %ir.uglygep4445, !tbaa !2)
  ; CHECK:   [[ADD4_12:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_11]], killed [[LWZX8]]
  ; CHECK:   [[LWZX9:%[0-9]+]]:gprc = LWZX [[LD20]], [[PHI]] :: (load (s32) from %ir.uglygep4243, !tbaa !2)
  ; CHECK:   [[ADD4_13:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_12]], killed [[LWZX9]]
  ; CHECK:   [[LWZX10:%[0-9]+]]:gprc = LWZX [[LD19]], [[PHI]] :: (load (s32) from %ir.uglygep4041, !tbaa !2)
  ; CHECK:   [[ADD4_14:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_13]], killed [[LWZX10]]
  ; CHECK:   [[LWZX11:%[0-9]+]]:gprc = LWZX [[LD18]], [[PHI]] :: (load (s32) from %ir.uglygep3839, !tbaa !2)
  ; CHECK:   [[ADD4_15:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_14]], killed [[LWZX11]]
  ; CHECK:   [[LWZX12:%[0-9]+]]:gprc = LWZX [[LD17]], [[PHI]] :: (load (s32) from %ir.uglygep3637, !tbaa !2)
  ; CHECK:   [[ADD4_16:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_15]], killed [[LWZX12]]
  ; CHECK:   [[LWZX13:%[0-9]+]]:gprc = LWZX [[LD16]], [[PHI]] :: (load (s32) from %ir.uglygep3435, !tbaa !2)
  ; CHECK:   [[ADD4_17:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_16]], killed [[LWZX13]]
  ; CHECK:   [[LWZX14:%[0-9]+]]:gprc = LWZX [[LD15]], [[PHI]] :: (load (s32) from %ir.uglygep3233, !tbaa !2)
  ; CHECK:   [[ADD4_18:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_17]], killed [[LWZX14]]
  ; CHECK:   [[LWZX15:%[0-9]+]]:gprc = LWZX [[LD14]], [[PHI]] :: (load (s32) from %ir.uglygep3031, !tbaa !2)
  ; CHECK:   [[ADD4_19:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_18]], killed [[LWZX15]]
  ; CHECK:   [[LWZX16:%[0-9]+]]:gprc = LWZX [[LD13]], [[PHI]] :: (load (s32) from %ir.uglygep2829, !tbaa !2)
  ; CHECK:   [[ADD4_20:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_19]], killed [[LWZX16]]
  ; CHECK:   [[LWZX17:%[0-9]+]]:gprc = LWZX [[LD12]], [[PHI]] :: (load (s32) from %ir.uglygep2627, !tbaa !2)
  ; CHECK:   [[ADD4_21:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_20]], killed [[LWZX17]]
  ; CHECK:   [[LWZX18:%[0-9]+]]:gprc = LWZX [[LD11]], [[PHI]] :: (load (s32) from %ir.uglygep2425, !tbaa !2)
  ; CHECK:   [[ADD4_22:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_21]], killed [[LWZX18]]
  ; CHECK:   [[LWZX19:%[0-9]+]]:gprc = LWZX [[LD10]], [[PHI]] :: (load (s32) from %ir.uglygep2223, !tbaa !2)
  ; CHECK:   [[ADD4_23:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_22]], killed [[LWZX19]]
  ; CHECK:   [[LWZX20:%[0-9]+]]:gprc = LWZX [[LD9]], [[PHI]] :: (load (s32) from %ir.uglygep2021, !tbaa !2)
  ; CHECK:   [[ADD4_24:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_23]], killed [[LWZX20]]
  ; CHECK:   [[LWZX21:%[0-9]+]]:gprc = LWZX [[LD8]], [[PHI]] :: (load (s32) from %ir.uglygep1819, !tbaa !2)
  ; CHECK:   [[ADD4_25:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_24]], killed [[LWZX21]]
  ; CHECK:   [[LWZX22:%[0-9]+]]:gprc = LWZX [[LD7]], [[PHI]] :: (load (s32) from %ir.uglygep1617, !tbaa !2)
  ; CHECK:   [[ADD4_26:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_25]], killed [[LWZX22]]
  ; CHECK:   [[LWZX23:%[0-9]+]]:gprc = LWZX [[LD6]], [[PHI]] :: (load (s32) from %ir.uglygep1415, !tbaa !2)
  ; CHECK:   [[ADD4_27:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_26]], killed [[LWZX23]]
  ; CHECK:   [[LWZX24:%[0-9]+]]:gprc = LWZX [[LD5]], [[PHI]] :: (load (s32) from %ir.uglygep1213, !tbaa !2)
  ; CHECK:   [[ADD4_28:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_27]], killed [[LWZX24]]
  ; CHECK:   [[LWZX25:%[0-9]+]]:gprc = LWZX [[LD4]], [[PHI]] :: (load (s32) from %ir.uglygep1011, !tbaa !2)
  ; CHECK:   [[ADD4_29:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_28]], killed [[LWZX25]]
  ; CHECK:   [[LWZX26:%[0-9]+]]:gprc = LWZX [[LD3]], [[PHI]] :: (load (s32) from %ir.uglygep89, !tbaa !2)
  ; CHECK:   [[ADD4_30:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_29]], killed [[LWZX26]]
  ; CHECK:   [[LWZX27:%[0-9]+]]:gprc = LWZX [[LD2]], [[PHI]] :: (load (s32) from %ir.uglygep67, !tbaa !2)
  ; CHECK:   [[ADD4_31:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_30]], killed [[LWZX27]]
  ; CHECK:   [[LWZX28:%[0-9]+]]:gprc = LWZX [[LD1]], [[PHI]] :: (load (s32) from %ir.uglygep45, !tbaa !2)
  ; CHECK:   [[ADD4_32:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_31]], killed [[LWZX28]]
  ; CHECK:   [[LWZX29:%[0-9]+]]:gprc = LWZX [[LD]], [[PHI]] :: (load (s32) from %ir.uglygep23, !tbaa !2)
  ; CHECK:   [[ADD4_33:%[0-9]+]]:gprc = nsw ADD4 killed [[ADD4_32]], killed [[LWZX29]]
  ; CHECK:   STWX killed [[ADD4_33]], [[COPY4]], [[PHI]] :: (store (s32) into %ir.uglygep1, !tbaa !2)
  ; CHECK:   [[ADDI8_5:%[0-9]+]]:g8rc = nuw nsw ADDI8 [[PHI1]], 1
  ; CHECK:   [[ADDI8_6:%[0-9]+]]:g8rc = nuw nsw ADDI8 [[PHI]], 4
  ; CHECK:   BDNZ8 %bb.3, implicit-def dead $ctr8, implicit $ctr8
  ; CHECK:   B %bb.2
  ; Entry block. The first seven arguments arrive in registers ($x3..$x10);
  ; the remaining 29 i32* arguments were passed on the stack and are loaded
  ; here from fixed stack slots (%32..%60, in reverse slot order). Guards the
  ; loop: if the trip-count argument (%31, low 32 bits of $x7) is < 1, branch
  ; to the early-exit block bb.2; otherwise fall through to the preheader.
  bb.0 (%ir-block.37):
    successors: %bb.1(0x50000000), %bb.2(0x30000000)
    liveins: $x3, $x5, $x6, $x7, $x8, $x9, $x10

    ; Register-passed arguments -> virtual registers.
    %29:g8rc_and_g8rc_nox0 = COPY $x10
    %28:g8rc_and_g8rc_nox0 = COPY $x9
    %27:g8rc_and_g8rc_nox0 = COPY $x8
    %26:g8rc = COPY $x7
    %25:g8rc_and_g8rc_nox0 = COPY $x6
    %24:g8rc_and_g8rc_nox0 = COPY $x5
    %22:g8rc = COPY $x3
    ; 32-bit views: %30 = arg0 (selector), %31 = arg4 (trip count).
    %30:gprc = COPY %22.sub_32
    %31:gprc = COPY %26.sub_32
    ; Stack-passed pointer arguments, one LD per fixed slot.
    %60:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.0 :: (load (s64) from %fixed-stack.0, align 16)
    %59:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.1 :: (load (s64) from %fixed-stack.1)
    %58:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.2 :: (load (s64) from %fixed-stack.2, align 16)
    %57:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.3 :: (load (s64) from %fixed-stack.3)
    %56:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.4 :: (load (s64) from %fixed-stack.4, align 16)
    %55:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.5 :: (load (s64) from %fixed-stack.5)
    %54:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.6 :: (load (s64) from %fixed-stack.6, align 16)
    %53:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.7 :: (load (s64) from %fixed-stack.7)
    %52:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.8 :: (load (s64) from %fixed-stack.8, align 16)
    %51:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.9 :: (load (s64) from %fixed-stack.9)
    %50:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.10 :: (load (s64) from %fixed-stack.10, align 16)
    %49:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.11 :: (load (s64) from %fixed-stack.11)
    %48:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.12 :: (load (s64) from %fixed-stack.12, align 16)
    %47:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.13 :: (load (s64) from %fixed-stack.13)
    %46:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.14 :: (load (s64) from %fixed-stack.14, align 16)
    %45:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.15 :: (load (s64) from %fixed-stack.15)
    %44:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.16 :: (load (s64) from %fixed-stack.16, align 16)
    %43:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.17 :: (load (s64) from %fixed-stack.17)
    %42:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.18 :: (load (s64) from %fixed-stack.18, align 16)
    %41:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.19 :: (load (s64) from %fixed-stack.19)
    %40:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.20 :: (load (s64) from %fixed-stack.20, align 16)
    %39:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.21 :: (load (s64) from %fixed-stack.21)
    %38:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.22 :: (load (s64) from %fixed-stack.22, align 16)
    %37:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.23 :: (load (s64) from %fixed-stack.23)
    %36:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.24 :: (load (s64) from %fixed-stack.24, align 16)
    %35:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.25 :: (load (s64) from %fixed-stack.25)
    %34:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.26 :: (load (s64) from %fixed-stack.26, align 16)
    %33:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.27 :: (load (s64) from %fixed-stack.27)
    %32:g8rc_and_g8rc_nox0 = LD 0, %fixed-stack.28 :: (load (s64) from %fixed-stack.28, align 16)
    ; Trip-count guard: skip the loop entirely when %31 < 1.
    %61:crrc = CMPWI %31, 1
    BCC 12, killed %61, %bb.2
    B %bb.1

  ; Loop preheader. Zero-extends the 32-bit trip count %31 to 64 bits
  ; (IMPLICIT_DEF + INSERT_SUBREG + RLDICL 0,32), loads it into CTR for the
  ; hardware counted loop, biases the three pointer induction bases by -4
  ; (the loop body uses pre-increment LWZU / ADDI8 4), and hoists
  ; loop-invariant values out of the loop.
  bb.1 (%ir-block.39):
    %65:g8rc = IMPLICIT_DEF
    %64:g8rc = INSERT_SUBREG %65, %31, %subreg.sub_32
    ; Clear the (undefined) upper 32 bits: %66 = zext(%31).
    %66:g8rc = RLDICL killed %64, 0, 32
    %0:g8rc = ADDI8 %24, -4
    %1:g8rc = ADDI8 %27, -4
    %2:g8rc = ADDI8 %28, -4
    MTCTR8loop killed %66, implicit-def dead $ctr8
    ; Initial values for the accumulator / induction PHIs in bb.3.
    %63:gprc = LI 0
    %62:g8rc = LI8 0
    ; %70 = 0x88888889 (LIS 34952 / ORI 34953): magic multiplier used by the
    ; MULHWU+RLWINM+MULLI 30+SUBF sequence in bb.3 — presumably an unsigned
    ; remainder-by-30 computation; confirm against the source C.
    %69:gprc = LIS 34952
    %70:gprc = ORI %69, 34953
    ; Loop-invariant compares of the selector %30, consumed by the branches
    ; in bb.3 (%74) and bb.8 (%75).
    %74:crrc = CMPLWI %30, 3
    %75:crrc = CMPLWI %30, 1
    B %bb.3

  ; Function exit: return 0 in $x3.
  bb.2 (%ir-block.41):
    %142:g8rc = LI8 0
    $x3 = COPY %142
    BLR8 implicit $lr8, implicit $rm, implicit $x3

  ; Loop header. Carries the induction variables and accumulator via PHIs,
  ; performs the pre-incremented loads/address updates, and defines
  ; %16:gprc — the value this test is about: it is used only in bb.7, but
  ; MachineSink must NOT sink it there because register pressure in bb.7 is
  ; already high (see the file-level comment in the IR section).
  bb.3 (%ir-block.42):
    successors: %bb.5(0x2aaaaaab), %bb.8(0x55555555)

    %3:g8rc_and_g8rc_nox0 = PHI %62, %bb.1, %21, %bb.7
    %4:g8rc_and_g8rc_nox0 = PHI %62, %bb.1, %20, %bb.7
    %5:gprc = PHI %63, %bb.1, %16, %bb.7
    %6:g8rc_and_g8rc_nox0 = PHI %0, %bb.1, %13, %bb.7
    %7:g8rc_and_g8rc_nox0 = PHI %1, %bb.1, %11, %bb.7
    %8:g8rc_and_g8rc_nox0 = PHI %2, %bb.1, %9, %bb.7
    ; Advance the two -4-biased pointers by one i32 element each.
    %10:g8rc_and_g8rc_nox0 = ADDI8 %8, 4
    %9:g8rc = COPY %10
    %12:g8rc_and_g8rc_nox0 = ADDI8 %7, 4
    %11:g8rc = COPY %12
    ; Pre-increment load: %67 = *(++%6), %68 = updated pointer.
    %67:gprc, %68:g8rc_and_g8rc_nox0 = LWZU 4, %6 :: (load (s32) from %ir.53, !tbaa !2)
    %13:g8rc = COPY %68
    %14:gprc_and_gprc_nor0 = COPY %4.sub_32
    ; Magic-multiply sequence with %70 (0x88888889) then MULLI 30 / SUBF:
    ; %15 = %14 - (%14 u/ 30) * 30, i.e. an unsigned remainder by 30.
    %71:gprc = MULHWU %14, %70
    %72:gprc = RLWINM %71, 28, 4, 31
    %73:gprc = nuw nsw MULLI killed %72, 30
    %15:gprc = SUBF killed %73, %14
    ; The instruction under test: live into bb.7 only, must stay here.
    %16:gprc = nsw ADD4 killed %67, %5
    ; Dispatch on %74 (%30 vs 3); equal -> bb.5, otherwise bb.8.
    BCC 76, %74, %bb.5
    B %bb.8

  ; Second dispatch level: test %75 (%30 vs 1, computed in bb.1);
  ; not-taken path falls to bb.4, otherwise bb.6.
  bb.8 (%ir-block.42):
    successors: %bb.4(0x40000001), %bb.6(0x3fffffff)

    BCC 68, %75, %bb.6
    B %bb.4

  ; Addend path A: %17 = low 32 bits of %4 shifted left by 1 (i.e. 2 * i,
  ; with the low bit cleared by the RLWINM mask 0..30).
  bb.4 (%ir-block.59):
    %76:gprc = COPY %4.sub_32
    %17:gprc = RLWINM %76, 1, 0, 30
    B %bb.7

  ; Addend path B: %18 = %14 + 100.
  bb.5 (%ir-block.62):
    %18:gprc = nuw nsw ADDI %14, 100
    B %bb.7

  ; Addend path C: empty — falls through to bb.7; the PHI there selects
  ; %15 (the remainder computed in bb.3) for this predecessor.
  bb.6 (%ir-block.64):

  ; Loop latch. Selects the per-iteration addend %19 from the three paths,
  ; then folds in %16 (from bb.3), two loads through the pre-incremented
  ; pointers (%12, %10), and 30 indexed loads (base = stack-passed array,
  ; index = %3) into one long serial ADD4 chain. This long chain of
  ; simultaneously-live gprc values is what makes register pressure here
  ; too high for MachineSink to move %16 into this block. Finally stores
  ; the sum, advances both induction variables, and loops via BDNZ8.
  bb.7 (%ir-block.65):
    successors: %bb.3(0x7c000000), %bb.2(0x04000000)

    %19:gprc = PHI %18, %bb.5, %17, %bb.4, %15, %bb.6
    ; Accumulation starts from %19 + %16.
    %77:gprc = nsw ADD4 %19, %16
    %78:gprc = LWZ 0, %12 :: (load (s32) from %ir.51, !tbaa !2)
    %79:gprc = nsw ADD4 killed %77, killed %78
    %80:gprc = LWZ 0, %10 :: (load (s32) from %ir.49, !tbaa !2)
    %81:gprc = nsw ADD4 killed %79, killed %80
    ; 30 load+add pairs, one per stack-passed array argument.
    %82:gprc = LWZX %29, %3 :: (load (s32) from %ir.uglygep6061, !tbaa !2)
    %83:gprc = nsw ADD4 killed %81, killed %82
    %84:gprc = LWZX %32, %3 :: (load (s32) from %ir.uglygep5859, !tbaa !2)
    %85:gprc = nsw ADD4 killed %83, killed %84
    %86:gprc = LWZX %33, %3 :: (load (s32) from %ir.uglygep5657, !tbaa !2)
    %87:gprc = nsw ADD4 killed %85, killed %86
    %88:gprc = LWZX %34, %3 :: (load (s32) from %ir.uglygep5455, !tbaa !2)
    %89:gprc = nsw ADD4 killed %87, killed %88
    %90:gprc = LWZX %35, %3 :: (load (s32) from %ir.uglygep5253, !tbaa !2)
    %91:gprc = nsw ADD4 killed %89, killed %90
    %92:gprc = LWZX %36, %3 :: (load (s32) from %ir.uglygep5051, !tbaa !2)
    %93:gprc = nsw ADD4 killed %91, killed %92
    %94:gprc = LWZX %37, %3 :: (load (s32) from %ir.uglygep4849, !tbaa !2)
    %95:gprc = nsw ADD4 killed %93, killed %94
    %96:gprc = LWZX %38, %3 :: (load (s32) from %ir.uglygep4647, !tbaa !2)
    %97:gprc = nsw ADD4 killed %95, killed %96
    %98:gprc = LWZX %39, %3 :: (load (s32) from %ir.uglygep4445, !tbaa !2)
    %99:gprc = nsw ADD4 killed %97, killed %98
    %100:gprc = LWZX %40, %3 :: (load (s32) from %ir.uglygep4243, !tbaa !2)
    %101:gprc = nsw ADD4 killed %99, killed %100
    %102:gprc = LWZX %41, %3 :: (load (s32) from %ir.uglygep4041, !tbaa !2)
    %103:gprc = nsw ADD4 killed %101, killed %102
    %104:gprc = LWZX %42, %3 :: (load (s32) from %ir.uglygep3839, !tbaa !2)
    %105:gprc = nsw ADD4 killed %103, killed %104
    %106:gprc = LWZX %43, %3 :: (load (s32) from %ir.uglygep3637, !tbaa !2)
    %107:gprc = nsw ADD4 killed %105, killed %106
    %108:gprc = LWZX %44, %3 :: (load (s32) from %ir.uglygep3435, !tbaa !2)
    %109:gprc = nsw ADD4 killed %107, killed %108
    %110:gprc = LWZX %45, %3 :: (load (s32) from %ir.uglygep3233, !tbaa !2)
    %111:gprc = nsw ADD4 killed %109, killed %110
    %112:gprc = LWZX %46, %3 :: (load (s32) from %ir.uglygep3031, !tbaa !2)
    %113:gprc = nsw ADD4 killed %111, killed %112
    %114:gprc = LWZX %47, %3 :: (load (s32) from %ir.uglygep2829, !tbaa !2)
    %115:gprc = nsw ADD4 killed %113, killed %114
    %116:gprc = LWZX %48, %3 :: (load (s32) from %ir.uglygep2627, !tbaa !2)
    %117:gprc = nsw ADD4 killed %115, killed %116
    %118:gprc = LWZX %49, %3 :: (load (s32) from %ir.uglygep2425, !tbaa !2)
    %119:gprc = nsw ADD4 killed %117, killed %118
    %120:gprc = LWZX %50, %3 :: (load (s32) from %ir.uglygep2223, !tbaa !2)
    %121:gprc = nsw ADD4 killed %119, killed %120
    %122:gprc = LWZX %51, %3 :: (load (s32) from %ir.uglygep2021, !tbaa !2)
    %123:gprc = nsw ADD4 killed %121, killed %122
    %124:gprc = LWZX %52, %3 :: (load (s32) from %ir.uglygep1819, !tbaa !2)
    %125:gprc = nsw ADD4 killed %123, killed %124
    %126:gprc = LWZX %53, %3 :: (load (s32) from %ir.uglygep1617, !tbaa !2)
    %127:gprc = nsw ADD4 killed %125, killed %126
    %128:gprc = LWZX %54, %3 :: (load (s32) from %ir.uglygep1415, !tbaa !2)
    %129:gprc = nsw ADD4 killed %127, killed %128
    %130:gprc = LWZX %55, %3 :: (load (s32) from %ir.uglygep1213, !tbaa !2)
    %131:gprc = nsw ADD4 killed %129, killed %130
    %132:gprc = LWZX %56, %3 :: (load (s32) from %ir.uglygep1011, !tbaa !2)
    %133:gprc = nsw ADD4 killed %131, killed %132
    %134:gprc = LWZX %57, %3 :: (load (s32) from %ir.uglygep89, !tbaa !2)
    %135:gprc = nsw ADD4 killed %133, killed %134
    %136:gprc = LWZX %58, %3 :: (load (s32) from %ir.uglygep67, !tbaa !2)
    %137:gprc = nsw ADD4 killed %135, killed %136
    %138:gprc = LWZX %59, %3 :: (load (s32) from %ir.uglygep45, !tbaa !2)
    %139:gprc = nsw ADD4 killed %137, killed %138
    %140:gprc = LWZX %60, %3 :: (load (s32) from %ir.uglygep23, !tbaa !2)
    %141:gprc = nsw ADD4 killed %139, killed %140
    ; Store the accumulated sum to %25[%3].
    STWX killed %141, %25, %3 :: (store (s32) into %ir.uglygep1, !tbaa !2)
    ; Advance the induction variables: counter += 1, byte offset += 4.
    %20:g8rc = nuw nsw ADDI8 %4, 1
    %21:g8rc = nuw nsw ADDI8 %3, 4
    ; Counted-loop back-branch: decrement CTR, loop while non-zero.
    BDNZ8 %bb.3, implicit-def dead $ctr8, implicit $ctr8
    B %bb.2

...
