| # NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py |
| # RUN: llc -mtriple aarch64 -run-pass=aarch64-prelegalizer-combiner -verify-machineinstrs %s -o - | FileCheck %s |
| |
name: or_same
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0

    ; Fold: x or x -> x
    ; The combiner should erase the G_OR entirely and forward %copy straight
    ; to the return register; the CHECK lines verify no G_OR survives.
    ; CHECK-LABEL: name: or_same
    ; CHECK: liveins: $x0
    ; CHECK: %copy:_(s64) = COPY $x0
    ; CHECK: $x0 = COPY %copy(s64)
    ; CHECK: RET_ReallyLR implicit $x0
    %copy:_(s64) = COPY $x0
    %or:_(s64) = G_OR %copy, %copy
    $x0 = COPY %or(s64)
    RET_ReallyLR implicit $x0

...
| --- |
name: and_same
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0

    ; Fold: x and x -> x
    ; Same shape as or_same above, but for G_AND: the combiner should erase
    ; the G_AND and forward %copy directly to $x0.

    ; CHECK-LABEL: name: and_same
    ; CHECK: liveins: $x0
    ; CHECK: %copy:_(s64) = COPY $x0
    ; CHECK: $x0 = COPY %copy(s64)
    ; CHECK: RET_ReallyLR implicit $x0
    %copy:_(s64) = COPY $x0
    %and:_(s64) = G_AND %copy, %copy
    $x0 = COPY %and(s64)
    RET_ReallyLR implicit $x0

...
| --- |
name: and_same2
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0, $x1

    ; We can fold when the LHS and RHS are guaranteed to be identical.
    ; %same_as_or is only a COPY of %or, so the combiner should look through
    ; the copy, treat both G_AND operands as the same value, and fold the
    ; G_AND away — the CHECK lines expect only the G_OR to remain.

    ; CHECK-LABEL: name: and_same2
    ; CHECK: liveins: $x0, $x1
    ; CHECK: %copy1:_(s64) = COPY $x0
    ; CHECK: %copy2:_(s64) = COPY $x1
    ; CHECK: %or:_(s64) = G_OR %copy1, %copy2
    ; CHECK: $x0 = COPY %or(s64)
    ; CHECK: RET_ReallyLR implicit $x0
    %copy1:_(s64) = COPY $x0
    %copy2:_(s64) = COPY $x1
    %or:_(s64) = G_OR %copy1, %copy2
    %same_as_or:_(s64) = COPY %or(s64)
    %and:_(s64) = G_AND %or, %same_as_or
    $x0 = COPY %and(s64)
    RET_ReallyLR implicit $x0

...
| --- |
name: or_and_not_same
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0, $x1, $x2

    ; None of the G_ORs or G_ANDs should be eliminated here, because their LHS
    ; and RHS values are different.
    ; Negative test: %or1 and %or2 share %copy1 but differ in their second
    ; operand, so the CHECK lines require every instruction to survive intact.

    ; CHECK-LABEL: name: or_and_not_same
    ; CHECK: liveins: $x0, $x1, $x2
    ; CHECK: %copy1:_(s64) = COPY $x0
    ; CHECK: %copy2:_(s64) = COPY $x1
    ; CHECK: %copy3:_(s64) = COPY $x2
    ; CHECK: %or1:_(s64) = G_OR %copy1, %copy2
    ; CHECK: %or2:_(s64) = G_OR %copy1, %copy3
    ; CHECK: %and:_(s64) = G_AND %or1, %or2
    ; CHECK: $x0 = COPY %and(s64)
    ; CHECK: RET_ReallyLR implicit $x0
    %copy1:_(s64) = COPY $x0
    %copy2:_(s64) = COPY $x1
    %copy3:_(s64) = COPY $x2
    %or1:_(s64) = G_OR %copy1, %copy2
    %or2:_(s64) = G_OR %copy1, %copy3
    %and:_(s64) = G_AND %or1, %or2
    $x0 = COPY %and(s64)
    RET_ReallyLR implicit $x0

...