; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --function-signature --scrub-attributes
; Check that we can fold operations with (gep null) inputs.
; Note: the LLParser already does some constant folding, check that output first:
; RUN: opt -S -passes=verify < %s | FileCheck %s --check-prefixes=ALL,LLPARSER
; We should be able to fold almost everything in InstSimplify other than the final test, which requires InstCombine
; RUN: opt -S -passes=instsimplify < %s | FileCheck %s --allow-unused-prefixes --check-prefixes=ALL,CHECK,INTEGRAL,INSTSIMPLIFY,INTEGRAL-INSTSIMPLIFY
; RUN: opt -S -passes=instcombine < %s | FileCheck %s --allow-unused-prefixes --check-prefixes=ALL,CHECK,INTEGRAL,INSTCOMBINE,INTEGRAL-INSTCOMBINE
; Non-integral pointers limit certain transformations on pointers:
; RUN: sed -e 's/p:64:64:64:64/p:64:64:64:64-ni:1/g' %s | opt -S -passes=instsimplify | \
; RUN: FileCheck %s --allow-unused-prefixes --check-prefixes=ALL,CHECK,NONINTEGRAL,INSTSIMPLIFY,NONINTEGRAL-INSTSIMPLIFY
; RUN: sed -e 's/p:64:64:64:64/p:64:64:64:64-ni:1/g' %s | opt -S -passes=instcombine | \
; RUN: FileCheck %s --allow-unused-prefixes --check-prefixes=ALL,CHECK,NONINTEGRAL,INSTCOMBINE,NONINTEGRAL-INSTCOMBINE
target datalayout = "p:64:64:64:64"
declare void @use_i64(i64)
declare void @use_ptr(i8 addrspace(1)*)
define i64 @constant_fold_ptrtoint_gep_zero() {
; ALL-LABEL: define {{[^@]+}}@constant_fold_ptrtoint_gep_zero() {
; ALL-NEXT: ret i64 0
;
ret i64 ptrtoint (i32 addrspace(1)* getelementptr (i32, i32 addrspace(1)* null, i64 0) to i64)
}
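; Note: i32 has a 4-byte allocation size, so InstCombine folds the next test to 1234 * 4 = 4936,
; while InstSimplify keeps the constant expression.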
define i64 @constant_fold_ptrtoint_gep_nonzero() {
; LLPARSER-LABEL: define {{[^@]+}}@constant_fold_ptrtoint_gep_nonzero() {
; LLPARSER-NEXT: ret i64 ptrtoint (i32 addrspace(1)* getelementptr (i32, i32 addrspace(1)* null, i64 1234) to i64)
;
; INSTSIMPLIFY-LABEL: define {{[^@]+}}@constant_fold_ptrtoint_gep_nonzero() {
; INSTSIMPLIFY-NEXT: ret i64 ptrtoint (i32 addrspace(1)* getelementptr (i32, i32 addrspace(1)* null, i64 1234) to i64)
;
; INSTCOMBINE-LABEL: define {{[^@]+}}@constant_fold_ptrtoint_gep_nonzero() {
; INSTCOMBINE-NEXT: ret i64 4936
;
ret i64 ptrtoint (i32 addrspace(1)* getelementptr (i32, i32 addrspace(1)* null, i64 1234) to i64)
}
define i64 @constant_fold_ptrtoint_gep_zero_inbounds() {
; ALL-LABEL: define {{[^@]+}}@constant_fold_ptrtoint_gep_zero_inbounds() {
; ALL-NEXT: ret i64 0
;
ret i64 ptrtoint (i32 addrspace(1)* getelementptr inbounds (i32, i32 addrspace(1)* null, i64 0) to i64)
}
; In theory we could fold this to poison/null, but that would break offsetof
; implementations that don't use __builtin_offsetof.
; TODO: should Clang special case ((INTEGER)&((TYPE *)0)->MEMBER) to emit a non-inbounds GEP?
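; For reference, a hypothetical pre-__builtin_offsetof macro that relies on this pattern
; (C sketch only, not part of this test):
;   #define my_offsetof(TYPE, MEMBER) ((unsigned long)&(((TYPE *)0)->MEMBER))
; Clang currently lowers &((TYPE *)0)->MEMBER to an inbounds GEP on null, so folding it to
; poison/null would break such macros.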
define i64 @constant_fold_ptrtoint_gep_nonzero_inbounds() {
; LLPARSER-LABEL: define {{[^@]+}}@constant_fold_ptrtoint_gep_nonzero_inbounds() {
; LLPARSER-NEXT: ret i64 ptrtoint (i32 addrspace(1)* getelementptr inbounds (i32, i32 addrspace(1)* null, i64 1234) to i64)
;
; INSTSIMPLIFY-LABEL: define {{[^@]+}}@constant_fold_ptrtoint_gep_nonzero_inbounds() {
; INSTSIMPLIFY-NEXT: ret i64 ptrtoint (i32 addrspace(1)* getelementptr inbounds (i32, i32 addrspace(1)* null, i64 1234) to i64)
;
; INSTCOMBINE-LABEL: define {{[^@]+}}@constant_fold_ptrtoint_gep_nonzero_inbounds() {
; INSTCOMBINE-NEXT: ret i64 4936
;
ret i64 ptrtoint (i32 addrspace(1)* getelementptr inbounds (i32, i32 addrspace(1)* null, i64 1234) to i64)
}
; Check all combinations of inbounds+non-inbounds GEP with the outer GEP having a non-zero offset
define void @constant_fold_ptrtoint_of_gep_of_nullgep() {
; LLPARSER-LABEL: define {{[^@]+}}@constant_fold_ptrtoint_of_gep_of_nullgep() {
; LLPARSER-NEXT: call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr inbounds (i8, i8 addrspace(1)* null, i64 1234) to i64))
; LLPARSER-NEXT: call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr (i8, i8 addrspace(1)* null, i64 1234) to i64))
; LLPARSER-NEXT: call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr inbounds (i8, i8 addrspace(1)* null, i64 1234) to i64))
; LLPARSER-NEXT: call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr (i8, i8 addrspace(1)* null, i64 1234) to i64))
; LLPARSER-NEXT: call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr inbounds (i8, i8 addrspace(1)* null, i64 1234) to i64))
; LLPARSER-NEXT: call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr inbounds (i8, i8 addrspace(1)* null, i64 1234) to i64))
; LLPARSER-NEXT: call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr (i8, i8 addrspace(1)* null, i64 1234) to i64))
; LLPARSER-NEXT: call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr (i8, i8 addrspace(1)* null, i64 1234) to i64))
; LLPARSER-NEXT: call void @use_i64(i64 0)
; LLPARSER-NEXT: call void @use_i64(i64 0)
; LLPARSER-NEXT: call void @use_i64(i64 0)
; LLPARSER-NEXT: call void @use_i64(i64 0)
; LLPARSER-NEXT: ret void
;
; INSTSIMPLIFY-LABEL: define {{[^@]+}}@constant_fold_ptrtoint_of_gep_of_nullgep() {
; INSTSIMPLIFY-NEXT: call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr inbounds (i8, i8 addrspace(1)* null, i64 1234) to i64))
; INSTSIMPLIFY-NEXT: call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr (i8, i8 addrspace(1)* null, i64 1234) to i64))
; INSTSIMPLIFY-NEXT: call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr inbounds (i8, i8 addrspace(1)* null, i64 1234) to i64))
; INSTSIMPLIFY-NEXT: call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr (i8, i8 addrspace(1)* null, i64 1234) to i64))
; INSTSIMPLIFY-NEXT: call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr inbounds (i8, i8 addrspace(1)* null, i64 1234) to i64))
; INSTSIMPLIFY-NEXT: call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr inbounds (i8, i8 addrspace(1)* null, i64 1234) to i64))
; INSTSIMPLIFY-NEXT: call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr (i8, i8 addrspace(1)* null, i64 1234) to i64))
; INSTSIMPLIFY-NEXT: call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr (i8, i8 addrspace(1)* null, i64 1234) to i64))
; INSTSIMPLIFY-NEXT: call void @use_i64(i64 0)
; INSTSIMPLIFY-NEXT: call void @use_i64(i64 0)
; INSTSIMPLIFY-NEXT: call void @use_i64(i64 0)
; INSTSIMPLIFY-NEXT: call void @use_i64(i64 0)
; INSTSIMPLIFY-NEXT: ret void
;
; INSTCOMBINE-LABEL: define {{[^@]+}}@constant_fold_ptrtoint_of_gep_of_nullgep() {
; INSTCOMBINE-NEXT: call void @use_i64(i64 1234)
; INSTCOMBINE-NEXT: call void @use_i64(i64 1234)
; INSTCOMBINE-NEXT: call void @use_i64(i64 1234)
; INSTCOMBINE-NEXT: call void @use_i64(i64 1234)
; INSTCOMBINE-NEXT: call void @use_i64(i64 1234)
; INSTCOMBINE-NEXT: call void @use_i64(i64 1234)
; INSTCOMBINE-NEXT: call void @use_i64(i64 1234)
; INSTCOMBINE-NEXT: call void @use_i64(i64 1234)
; INSTCOMBINE-NEXT: call void @use_i64(i64 0)
; INSTCOMBINE-NEXT: call void @use_i64(i64 0)
; INSTCOMBINE-NEXT: call void @use_i64(i64 0)
; INSTCOMBINE-NEXT: call void @use_i64(i64 0)
; INSTCOMBINE-NEXT: ret void
;
call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr inbounds (i8, i8 addrspace(1)* getelementptr inbounds (i8, i8 addrspace(1)* null, i64 0), i64 1234) to i64))
call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr (i8, i8 addrspace(1)* getelementptr inbounds (i8, i8 addrspace(1)* null, i64 0), i64 1234) to i64))
call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr inbounds (i8, i8 addrspace(1)* getelementptr (i8, i8 addrspace(1)* null, i64 0), i64 1234) to i64))
call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr (i8, i8 addrspace(1)* getelementptr (i8, i8 addrspace(1)* null, i64 0), i64 1234) to i64))
; Same again but this time with the inner GEP using the non-zero offset
call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr inbounds (i8, i8 addrspace(1)* getelementptr inbounds (i8, i8 addrspace(1)* null, i64 1234), i64 0) to i64))
call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr (i8, i8 addrspace(1)* getelementptr inbounds (i8, i8 addrspace(1)* null, i64 1234), i64 0) to i64))
call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr inbounds (i8, i8 addrspace(1)* getelementptr (i8, i8 addrspace(1)* null, i64 1234), i64 0) to i64))
call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr (i8, i8 addrspace(1)* getelementptr (i8, i8 addrspace(1)* null, i64 1234), i64 0) to i64))
; And finally with two constants that sum to zero
call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr inbounds (i8, i8 addrspace(1)* getelementptr inbounds (i8, i8 addrspace(1)* null, i64 -1), i64 1) to i64))
call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr (i8, i8 addrspace(1)* getelementptr inbounds (i8, i8 addrspace(1)* null, i64 -1), i64 1) to i64))
call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr inbounds (i8, i8 addrspace(1)* getelementptr (i8, i8 addrspace(1)* null, i64 -1), i64 1) to i64))
call void @use_i64(i64 ptrtoint (i8 addrspace(1)* getelementptr (i8, i8 addrspace(1)* getelementptr (i8, i8 addrspace(1)* null, i64 -1), i64 1) to i64))
ret void
}
; Another set of tests for instructions instead of constants
define i64 @fold_ptrtoint_nullgep_zero() {
; LLPARSER-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_zero() {
; LLPARSER-NEXT: [[OFFSET:%.*]] = add i64 0, 0
; LLPARSER-NEXT: [[PTR:%.*]] = getelementptr i8, i8 addrspace(1)* null, i64 [[OFFSET]]
; LLPARSER-NEXT: [[RET:%.*]] = ptrtoint i8 addrspace(1)* [[PTR]] to i64
; LLPARSER-NEXT: ret i64 [[RET]]
;
; CHECK-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_zero() {
; CHECK-NEXT: ret i64 0
;
%offset = add i64 0, 0
%ptr = getelementptr i8, i8 addrspace(1)* null, i64 %offset
%ret = ptrtoint i8 addrspace(1)* %ptr to i64
ret i64 %ret
}
define i64 @fold_ptrtoint_nullgep_zero_inbounds() {
; LLPARSER-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_zero_inbounds() {
; LLPARSER-NEXT: [[OFFSET:%.*]] = add i64 0, 0
; LLPARSER-NEXT: [[PTR:%.*]] = getelementptr inbounds i8, i8 addrspace(1)* null, i64 [[OFFSET]]
; LLPARSER-NEXT: [[RET:%.*]] = ptrtoint i8 addrspace(1)* [[PTR]] to i64
; LLPARSER-NEXT: ret i64 [[RET]]
;
; CHECK-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_zero_inbounds() {
; CHECK-NEXT: ret i64 0
;
%offset = add i64 0, 0
%ptr = getelementptr inbounds i8, i8 addrspace(1)* null, i64 %offset
%ret = ptrtoint i8 addrspace(1)* %ptr to i64
ret i64 %ret
}
define i64 @fold_ptrtoint_nullgep_nonzero() {
; LLPARSER-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_nonzero() {
; LLPARSER-NEXT: [[OFFSET:%.*]] = add i64 1234, 0
; LLPARSER-NEXT: [[PTR:%.*]] = getelementptr i8, i8 addrspace(1)* null, i64 [[OFFSET]]
; LLPARSER-NEXT: [[RET:%.*]] = ptrtoint i8 addrspace(1)* [[PTR]] to i64
; LLPARSER-NEXT: ret i64 [[RET]]
;
; CHECK-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_nonzero() {
; CHECK-NEXT: ret i64 1234
;
%offset = add i64 1234, 0
%ptr = getelementptr i8, i8 addrspace(1)* null, i64 %offset
%ret = ptrtoint i8 addrspace(1)* %ptr to i64
ret i64 %ret
}
; An inbounds constant null-GEP with a non-zero offset could be constant-folded to null/poison,
; but folding it to the offset value is what makes ((INTEGER)&((TYPE *)0)->MEMBER) work.
define i64 @fold_ptrtoint_nullgep_nonzero_inbounds() {
; LLPARSER-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_nonzero_inbounds() {
; LLPARSER-NEXT: [[OFFSET:%.*]] = add i64 1234, 0
; LLPARSER-NEXT: [[PTR:%.*]] = getelementptr inbounds i8, i8 addrspace(1)* null, i64 [[OFFSET]]
; LLPARSER-NEXT: [[RET:%.*]] = ptrtoint i8 addrspace(1)* [[PTR]] to i64
; LLPARSER-NEXT: ret i64 [[RET]]
;
; CHECK-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_nonzero_inbounds() {
; CHECK-NEXT: ret i64 1234
;
%offset = add i64 1234, 0
%ptr = getelementptr inbounds i8, i8 addrspace(1)* null, i64 %offset
%ret = ptrtoint i8 addrspace(1)* %ptr to i64
ret i64 %ret
}
; We should be able to fold ptrtoint(gep null, x) to x
define i64 @fold_ptrtoint_nullgep_variable(i64 %val) {
; LLPARSER-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_variable
; LLPARSER-SAME: (i64 [[VAL:%.*]]) {
; LLPARSER-NEXT: [[PTR:%.*]] = getelementptr i8, i8 addrspace(1)* null, i64 [[VAL]]
; LLPARSER-NEXT: [[RET:%.*]] = ptrtoint i8 addrspace(1)* [[PTR]] to i64
; LLPARSER-NEXT: ret i64 [[RET]]
;
; INSTSIMPLIFY-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_variable
; INSTSIMPLIFY-SAME: (i64 [[VAL:%.*]]) {
; INSTSIMPLIFY-NEXT: [[PTR:%.*]] = getelementptr i8, i8 addrspace(1)* null, i64 [[VAL]]
; INSTSIMPLIFY-NEXT: [[RET:%.*]] = ptrtoint i8 addrspace(1)* [[PTR]] to i64
; INSTSIMPLIFY-NEXT: ret i64 [[RET]]
;
; INSTCOMBINE-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_variable
; INSTCOMBINE-SAME: (i64 [[VAL:%.*]]) {
; INSTCOMBINE-NEXT: ret i64 [[VAL]]
;
%ptr = getelementptr i8, i8 addrspace(1)* null, i64 %val
%ret = ptrtoint i8 addrspace(1)* %ptr to i64
ret i64 %ret
}
; Inbounds null-GEP with non-zero offset could be folded to poison/null.
define i64 @fold_ptrtoint_nullgep_variable_known_nonzero(i64 %val) {
; LLPARSER-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_variable_known_nonzero
; LLPARSER-SAME: (i64 [[VAL:%.*]]) {
; LLPARSER-NEXT: [[NON_ZERO_OFFSET:%.*]] = or i64 [[VAL]], 1
; LLPARSER-NEXT: [[PTR:%.*]] = getelementptr i8, i8 addrspace(1)* null, i64 [[NON_ZERO_OFFSET]]
; LLPARSER-NEXT: [[RET:%.*]] = ptrtoint i8 addrspace(1)* [[PTR]] to i64
; LLPARSER-NEXT: ret i64 [[RET]]
;
; INSTSIMPLIFY-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_variable_known_nonzero
; INSTSIMPLIFY-SAME: (i64 [[VAL:%.*]]) {
; INSTSIMPLIFY-NEXT: [[NON_ZERO_OFFSET:%.*]] = or i64 [[VAL]], 1
; INSTSIMPLIFY-NEXT: [[PTR:%.*]] = getelementptr i8, i8 addrspace(1)* null, i64 [[NON_ZERO_OFFSET]]
; INSTSIMPLIFY-NEXT: [[RET:%.*]] = ptrtoint i8 addrspace(1)* [[PTR]] to i64
; INSTSIMPLIFY-NEXT: ret i64 [[RET]]
;
; INSTCOMBINE-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_variable_known_nonzero
; INSTCOMBINE-SAME: (i64 [[VAL:%.*]]) {
; INSTCOMBINE-NEXT: [[NON_ZERO_OFFSET:%.*]] = or i64 [[VAL]], 1
; INSTCOMBINE-NEXT: ret i64 [[NON_ZERO_OFFSET]]
;
%non_zero_offset = or i64 %val, 1
%ptr = getelementptr i8, i8 addrspace(1)* null, i64 %non_zero_offset
%ret = ptrtoint i8 addrspace(1)* %ptr to i64
ret i64 %ret
}
; This is only valid if %val is zero, so we could fold the result to 0.
define i64 @fold_ptrtoint_nullgep_variable_inbounds(i64 %val) {
; LLPARSER-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_variable_inbounds
; LLPARSER-SAME: (i64 [[VAL:%.*]]) {
; LLPARSER-NEXT: [[PTR:%.*]] = getelementptr inbounds i8, i8 addrspace(1)* null, i64 [[VAL]]
; LLPARSER-NEXT: [[RET:%.*]] = ptrtoint i8 addrspace(1)* [[PTR]] to i64
; LLPARSER-NEXT: ret i64 [[RET]]
;
; INSTSIMPLIFY-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_variable_inbounds
; INSTSIMPLIFY-SAME: (i64 [[VAL:%.*]]) {
; INSTSIMPLIFY-NEXT: [[PTR:%.*]] = getelementptr inbounds i8, i8 addrspace(1)* null, i64 [[VAL]]
; INSTSIMPLIFY-NEXT: [[RET:%.*]] = ptrtoint i8 addrspace(1)* [[PTR]] to i64
; INSTSIMPLIFY-NEXT: ret i64 [[RET]]
;
; INSTCOMBINE-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_variable_inbounds
; INSTCOMBINE-SAME: (i64 [[VAL:%.*]]) {
; INSTCOMBINE-NEXT: ret i64 [[VAL]]
;
%ptr = getelementptr inbounds i8, i8 addrspace(1)* null, i64 %val
%ret = ptrtoint i8 addrspace(1)* %ptr to i64
ret i64 %ret
}
; A non-constant but known-non-zero GEP could be folded to poison/null
define i64 @fold_ptrtoint_nullgep_variable_known_nonzero_inbounds(i64 %val) {
; LLPARSER-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_variable_known_nonzero_inbounds
; LLPARSER-SAME: (i64 [[VAL:%.*]]) {
; LLPARSER-NEXT: [[NON_ZERO_OFFSET:%.*]] = or i64 [[VAL]], 1
; LLPARSER-NEXT: [[PTR:%.*]] = getelementptr inbounds i8, i8 addrspace(1)* null, i64 [[NON_ZERO_OFFSET]]
; LLPARSER-NEXT: [[RET:%.*]] = ptrtoint i8 addrspace(1)* [[PTR]] to i64
; LLPARSER-NEXT: ret i64 [[RET]]
;
; INSTSIMPLIFY-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_variable_known_nonzero_inbounds
; INSTSIMPLIFY-SAME: (i64 [[VAL:%.*]]) {
; INSTSIMPLIFY-NEXT: [[NON_ZERO_OFFSET:%.*]] = or i64 [[VAL]], 1
; INSTSIMPLIFY-NEXT: [[PTR:%.*]] = getelementptr inbounds i8, i8 addrspace(1)* null, i64 [[NON_ZERO_OFFSET]]
; INSTSIMPLIFY-NEXT: [[RET:%.*]] = ptrtoint i8 addrspace(1)* [[PTR]] to i64
; INSTSIMPLIFY-NEXT: ret i64 [[RET]]
;
; INSTCOMBINE-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_variable_known_nonzero_inbounds
; INSTCOMBINE-SAME: (i64 [[VAL:%.*]]) {
; INSTCOMBINE-NEXT: [[NON_ZERO_OFFSET:%.*]] = or i64 [[VAL]], 1
; INSTCOMBINE-NEXT: ret i64 [[NON_ZERO_OFFSET]]
;
%non_zero_offset = or i64 %val, 1
%ptr = getelementptr inbounds i8, i8 addrspace(1)* null, i64 %non_zero_offset
%ret = ptrtoint i8 addrspace(1)* %ptr to i64
ret i64 %ret
}
; A non-constant but known-non-zero GEP could be folded to poison/null
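; For the test below, the offset is 2 * (%val | 1) + 1 bytes, which InstCombine
; rewrites as (%val << 1) | 3.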
define i64 @fold_ptrtoint_nullgep_variable_known_nonzero_inbounds_multiple_indices(i64 %val) {
; LLPARSER-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_variable_known_nonzero_inbounds_multiple_indices
; LLPARSER-SAME: (i64 [[VAL:%.*]]) {
; LLPARSER-NEXT: [[NON_ZERO_OFFSET:%.*]] = or i64 [[VAL]], 1
; LLPARSER-NEXT: [[PTR:%.*]] = getelementptr inbounds [2 x i8], [2 x i8] addrspace(1)* null, i64 [[NON_ZERO_OFFSET]], i32 1
; LLPARSER-NEXT: [[RET:%.*]] = ptrtoint i8 addrspace(1)* [[PTR]] to i64
; LLPARSER-NEXT: ret i64 [[RET]]
;
; INSTSIMPLIFY-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_variable_known_nonzero_inbounds_multiple_indices
; INSTSIMPLIFY-SAME: (i64 [[VAL:%.*]]) {
; INSTSIMPLIFY-NEXT: [[NON_ZERO_OFFSET:%.*]] = or i64 [[VAL]], 1
; INSTSIMPLIFY-NEXT: [[PTR:%.*]] = getelementptr inbounds [2 x i8], [2 x i8] addrspace(1)* null, i64 [[NON_ZERO_OFFSET]], i32 1
; INSTSIMPLIFY-NEXT: [[RET:%.*]] = ptrtoint i8 addrspace(1)* [[PTR]] to i64
; INSTSIMPLIFY-NEXT: ret i64 [[RET]]
;
; INSTCOMBINE-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_variable_known_nonzero_inbounds_multiple_indices
; INSTCOMBINE-SAME: (i64 [[VAL:%.*]]) {
; INSTCOMBINE-NEXT: [[NON_ZERO_OFFSET:%.*]] = shl i64 [[VAL]], 1
; INSTCOMBINE-NEXT: [[PTR_OFFS:%.*]] = or i64 [[NON_ZERO_OFFSET]], 3
; INSTCOMBINE-NEXT: ret i64 [[PTR_OFFS]]
;
%non_zero_offset = or i64 %val, 1
%ptr = getelementptr inbounds [2 x i8], [2 x i8] addrspace(1)* null, i64 %non_zero_offset, i32 1
%ret = ptrtoint i8 addrspace(1)* %ptr to i64
ret i64 %ret
}
; We can't fold non-i8 GEPs in InstSimplify since that would require adding new arithmetic.
; However, InstCombine can decompose the null GEP and convert it to a shift.
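; With 4-byte i32 elements, the offset below is %val * 4, which InstCombine emits as %val << 2.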
define i64 @fold_ptrtoint_nullgep_i32_variable(i64 %val) {
; LLPARSER-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_i32_variable
; LLPARSER-SAME: (i64 [[VAL:%.*]]) {
; LLPARSER-NEXT: [[PTR:%.*]] = getelementptr i32, i32 addrspace(1)* null, i64 [[VAL]]
; LLPARSER-NEXT: [[RET:%.*]] = ptrtoint i32 addrspace(1)* [[PTR]] to i64
; LLPARSER-NEXT: ret i64 [[RET]]
;
; INSTSIMPLIFY-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_i32_variable
; INSTSIMPLIFY-SAME: (i64 [[VAL:%.*]]) {
; INSTSIMPLIFY-NEXT: [[PTR:%.*]] = getelementptr i32, i32 addrspace(1)* null, i64 [[VAL]]
; INSTSIMPLIFY-NEXT: [[RET:%.*]] = ptrtoint i32 addrspace(1)* [[PTR]] to i64
; INSTSIMPLIFY-NEXT: ret i64 [[RET]]
;
; INSTCOMBINE-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_i32_variable
; INSTCOMBINE-SAME: (i64 [[VAL:%.*]]) {
; INSTCOMBINE-NEXT: [[PTR_IDX:%.*]] = shl i64 [[VAL]], 2
; INSTCOMBINE-NEXT: ret i64 [[PTR_IDX]]
;
%ptr = getelementptr i32, i32 addrspace(1)* null, i64 %val
%ret = ptrtoint i32 addrspace(1)* %ptr to i64
ret i64 %ret
}
; The ptrtoint type does not match the index type, so this requires a new trunc instruction.
define i32 @fold_ptrtoint_nullgep_variable_trunc(i64 %val) {
; LLPARSER-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_variable_trunc
; LLPARSER-SAME: (i64 [[VAL:%.*]]) {
; LLPARSER-NEXT: [[PTR:%.*]] = getelementptr i8, i8 addrspace(1)* null, i64 [[VAL]]
; LLPARSER-NEXT: [[RET:%.*]] = ptrtoint i8 addrspace(1)* [[PTR]] to i32
; LLPARSER-NEXT: ret i32 [[RET]]
;
; INSTSIMPLIFY-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_variable_trunc
; INSTSIMPLIFY-SAME: (i64 [[VAL:%.*]]) {
; INSTSIMPLIFY-NEXT: [[PTR:%.*]] = getelementptr i8, i8 addrspace(1)* null, i64 [[VAL]]
; INSTSIMPLIFY-NEXT: [[RET:%.*]] = ptrtoint i8 addrspace(1)* [[PTR]] to i32
; INSTSIMPLIFY-NEXT: ret i32 [[RET]]
;
; INSTCOMBINE-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_variable_trunc
; INSTCOMBINE-SAME: (i64 [[VAL:%.*]]) {
; INSTCOMBINE-NEXT: [[RET:%.*]] = trunc i64 [[VAL]] to i32
; INSTCOMBINE-NEXT: ret i32 [[RET]]
;
%ptr = getelementptr i8, i8 addrspace(1)* null, i64 %val
%ret = ptrtoint i8 addrspace(1)* %ptr to i32
ret i32 %ret
}
; For the following three tests, we could fold the result to poison/null since there is at least
; one inbounds GEP on null with a non-zero offset.
define i64 @fold_ptrtoint_zero_nullgep_of_nonzero_inbounds_nullgep() {
; LLPARSER-LABEL: define {{[^@]+}}@fold_ptrtoint_zero_nullgep_of_nonzero_inbounds_nullgep() {
; LLPARSER-NEXT: [[NONZERO_OFFSET:%.*]] = add i64 1234, 0
; LLPARSER-NEXT: [[ZERO_OFFSET:%.*]] = sub i64 [[NONZERO_OFFSET]], 1234
; LLPARSER-NEXT: [[PTR:%.*]] = getelementptr inbounds i8, i8 addrspace(1)* null, i64 [[NONZERO_OFFSET]]
; LLPARSER-NEXT: [[PTR2:%.*]] = getelementptr i8, i8 addrspace(1)* [[PTR]], i64 [[ZERO_OFFSET]]
; LLPARSER-NEXT: [[RET:%.*]] = ptrtoint i8 addrspace(1)* [[PTR2]] to i64
; LLPARSER-NEXT: ret i64 [[RET]]
;
; CHECK-LABEL: define {{[^@]+}}@fold_ptrtoint_zero_nullgep_of_nonzero_inbounds_nullgep() {
; CHECK-NEXT: ret i64 1234
;
%nonzero_offset = add i64 1234, 0
%zero_offset = sub i64 %nonzero_offset, 1234
%ptr = getelementptr inbounds i8, i8 addrspace(1)* null, i64 %nonzero_offset
%ptr2 = getelementptr i8, i8 addrspace(1)* %ptr, i64 %zero_offset
%ret = ptrtoint i8 addrspace(1)* %ptr2 to i64
ret i64 %ret
}
define i64 @fold_ptrtoint_nonzero_inbounds_nullgep_of_zero_noninbounds_nullgep() {
; LLPARSER-LABEL: define {{[^@]+}}@fold_ptrtoint_nonzero_inbounds_nullgep_of_zero_noninbounds_nullgep() {
; LLPARSER-NEXT: [[NONZERO_OFFSET:%.*]] = add i64 1234, 0
; LLPARSER-NEXT: [[ZERO_OFFSET:%.*]] = sub i64 [[NONZERO_OFFSET]], 1234
; LLPARSER-NEXT: [[PTR:%.*]] = getelementptr i8, i8 addrspace(1)* null, i64 [[ZERO_OFFSET]]
; LLPARSER-NEXT: [[PTR2:%.*]] = getelementptr inbounds i8, i8 addrspace(1)* [[PTR]], i64 [[NONZERO_OFFSET]]
; LLPARSER-NEXT: [[RET:%.*]] = ptrtoint i8 addrspace(1)* [[PTR2]] to i64
; LLPARSER-NEXT: ret i64 [[RET]]
;
; CHECK-LABEL: define {{[^@]+}}@fold_ptrtoint_nonzero_inbounds_nullgep_of_zero_noninbounds_nullgep() {
; CHECK-NEXT: ret i64 1234
;
%nonzero_offset = add i64 1234, 0
%zero_offset = sub i64 %nonzero_offset, 1234
%ptr = getelementptr i8, i8 addrspace(1)* null, i64 %zero_offset
%ptr2 = getelementptr inbounds i8, i8 addrspace(1)* %ptr, i64 %nonzero_offset
%ret = ptrtoint i8 addrspace(1)* %ptr2 to i64
ret i64 %ret
}
; We should also be able to fold GEPs with multiple indices.
%struct.S = type { [2 x %struct.K] }
%struct.K = type { [32 x i8] }
define i64 @fold_complex_index_last_nonzero(i64 %x) local_unnamed_addr #0 {
; LLPARSER-LABEL: define {{[^@]+}}@fold_complex_index_last_nonzero
; LLPARSER-SAME: (i64 [[X:%.*]]) local_unnamed_addr {
; LLPARSER-NEXT: entry:
; LLPARSER-NEXT: [[PTR:%.*]] = getelementptr inbounds [[STRUCT_S:%.*]], [[STRUCT_S]] addrspace(1)* null, i64 0, i32 0, i64 0, i32 0, i64 [[X]]
; LLPARSER-NEXT: [[RET:%.*]] = ptrtoint i8 addrspace(1)* [[PTR]] to i64
; LLPARSER-NEXT: ret i64 [[RET]]
;
; INSTSIMPLIFY-LABEL: define {{[^@]+}}@fold_complex_index_last_nonzero
; INSTSIMPLIFY-SAME: (i64 [[X:%.*]]) local_unnamed_addr {
; INSTSIMPLIFY-NEXT: entry:
; INSTSIMPLIFY-NEXT: [[PTR:%.*]] = getelementptr inbounds [[STRUCT_S:%.*]], [[STRUCT_S]] addrspace(1)* null, i64 0, i32 0, i64 0, i32 0, i64 [[X]]
; INSTSIMPLIFY-NEXT: [[RET:%.*]] = ptrtoint i8 addrspace(1)* [[PTR]] to i64
; INSTSIMPLIFY-NEXT: ret i64 [[RET]]
;
; INSTCOMBINE-LABEL: define {{[^@]+}}@fold_complex_index_last_nonzero
; INSTCOMBINE-SAME: (i64 [[X:%.*]]) local_unnamed_addr {
; INSTCOMBINE-NEXT: entry:
; INSTCOMBINE-NEXT: ret i64 [[X]]
;
entry:
%ptr = getelementptr inbounds %struct.S, %struct.S addrspace(1)* null, i64 0, i32 0, i64 0, i32 0, i64 %x
%ret = ptrtoint i8 addrspace(1)* %ptr to i64
ret i64 %ret
}
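; %struct.K is 32 bytes and %struct.S is 64 bytes, so indices (1, 0, 1, 0, %x) below
; give an offset of 64 + 32 + %x = %x + 96, matching the InstCombine result.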
define i64 @fold_complex_index_multiple_nonzero(i64 %x) local_unnamed_addr #0 {
; LLPARSER-LABEL: define {{[^@]+}}@fold_complex_index_multiple_nonzero
; LLPARSER-SAME: (i64 [[X:%.*]]) local_unnamed_addr {
; LLPARSER-NEXT: entry:
; LLPARSER-NEXT: [[PTR:%.*]] = getelementptr inbounds [[STRUCT_S:%.*]], [[STRUCT_S]] addrspace(1)* null, i64 1, i32 0, i64 1, i32 0, i64 [[X]]
; LLPARSER-NEXT: [[RET:%.*]] = ptrtoint i8 addrspace(1)* [[PTR]] to i64
; LLPARSER-NEXT: ret i64 [[RET]]
;
; INSTSIMPLIFY-LABEL: define {{[^@]+}}@fold_complex_index_multiple_nonzero
; INSTSIMPLIFY-SAME: (i64 [[X:%.*]]) local_unnamed_addr {
; INSTSIMPLIFY-NEXT: entry:
; INSTSIMPLIFY-NEXT: [[PTR:%.*]] = getelementptr inbounds [[STRUCT_S:%.*]], [[STRUCT_S]] addrspace(1)* null, i64 1, i32 0, i64 1, i32 0, i64 [[X]]
; INSTSIMPLIFY-NEXT: [[RET:%.*]] = ptrtoint i8 addrspace(1)* [[PTR]] to i64
; INSTSIMPLIFY-NEXT: ret i64 [[RET]]
;
; INSTCOMBINE-LABEL: define {{[^@]+}}@fold_complex_index_multiple_nonzero
; INSTCOMBINE-SAME: (i64 [[X:%.*]]) local_unnamed_addr {
; INSTCOMBINE-NEXT: entry:
; INSTCOMBINE-NEXT: [[PTR_OFFS:%.*]] = add nsw i64 [[X]], 96
; INSTCOMBINE-NEXT: ret i64 [[PTR_OFFS]]
;
entry:
%ptr = getelementptr inbounds %struct.S, %struct.S addrspace(1)* null, i64 1, i32 0, i64 1, i32 0, i64 %x
%ret = ptrtoint i8 addrspace(1)* %ptr to i64
ret i64 %ret
}
define i64 @fold_ptrtoint_inbounds_nullgep_of_nonzero_inbounds_nullgep() {
; LLPARSER-LABEL: define {{[^@]+}}@fold_ptrtoint_inbounds_nullgep_of_nonzero_inbounds_nullgep() {
; LLPARSER-NEXT: [[NONZERO_OFFSET:%.*]] = add i64 1234, 0
; LLPARSER-NEXT: [[ZERO_OFFSET:%.*]] = sub i64 [[NONZERO_OFFSET]], 1234
; LLPARSER-NEXT: [[PTR:%.*]] = getelementptr inbounds i8, i8 addrspace(1)* null, i64 [[NONZERO_OFFSET]]
; LLPARSER-NEXT: [[PTR2:%.*]] = getelementptr inbounds i8, i8 addrspace(1)* [[PTR]], i64 [[ZERO_OFFSET]]
; LLPARSER-NEXT: [[RET:%.*]] = ptrtoint i8 addrspace(1)* [[PTR2]] to i64
; LLPARSER-NEXT: ret i64 [[RET]]
;
; CHECK-LABEL: define {{[^@]+}}@fold_ptrtoint_inbounds_nullgep_of_nonzero_inbounds_nullgep() {
; CHECK-NEXT: ret i64 1234
;
%nonzero_offset = add i64 1234, 0
%zero_offset = sub i64 %nonzero_offset, 1234
%ptr = getelementptr inbounds i8, i8 addrspace(1)* null, i64 %nonzero_offset
%ptr2 = getelementptr inbounds i8, i8 addrspace(1)* %ptr, i64 %zero_offset
%ret = ptrtoint i8 addrspace(1)* %ptr2 to i64
ret i64 %ret
}
; Check that InstCombine can convert ptrtoint(gep null) with multiple indices into explicit offset arithmetic.
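; For the [2 x i16] tests below, each array element is 4 bytes and each i16 is 2 bytes,
; so the expected offsets are 4 * %x + 6 and 28 + 2 * %x respectively.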
define i64 @fold_ptrtoint_nullgep_array_one_var_1(i64 %x) {
; LLPARSER-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_array_one_var_1
; LLPARSER-SAME: (i64 [[X:%.*]]) {
; LLPARSER-NEXT: [[PTR:%.*]] = getelementptr [2 x i16], [2 x i16] addrspace(1)* null, i64 [[X]], i64 3
; LLPARSER-NEXT: [[RET:%.*]] = ptrtoint i16 addrspace(1)* [[PTR]] to i64
; LLPARSER-NEXT: ret i64 [[RET]]
;
; INSTSIMPLIFY-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_array_one_var_1
; INSTSIMPLIFY-SAME: (i64 [[X:%.*]]) {
; INSTSIMPLIFY-NEXT: [[PTR:%.*]] = getelementptr [2 x i16], [2 x i16] addrspace(1)* null, i64 [[X]], i64 3
; INSTSIMPLIFY-NEXT: [[RET:%.*]] = ptrtoint i16 addrspace(1)* [[PTR]] to i64
; INSTSIMPLIFY-NEXT: ret i64 [[RET]]
;
; INSTCOMBINE-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_array_one_var_1
; INSTCOMBINE-SAME: (i64 [[X:%.*]]) {
; INSTCOMBINE-NEXT: [[PTR_IDX:%.*]] = shl i64 [[X]], 2
; INSTCOMBINE-NEXT: [[PTR_OFFS:%.*]] = add i64 [[PTR_IDX]], 6
; INSTCOMBINE-NEXT: ret i64 [[PTR_OFFS]]
;
%ptr = getelementptr [2 x i16], [2 x i16] addrspace(1)* null, i64 %x, i64 3
%ret = ptrtoint i16 addrspace(1)* %ptr to i64
ret i64 %ret
}
define i64 @fold_ptrtoint_nullgep_array_one_var_2(i64 %x) {
; LLPARSER-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_array_one_var_2
; LLPARSER-SAME: (i64 [[X:%.*]]) {
; LLPARSER-NEXT: [[PTR:%.*]] = getelementptr [2 x i16], [2 x i16] addrspace(1)* null, i64 7, i64 [[X]]
; LLPARSER-NEXT: [[RET:%.*]] = ptrtoint i16 addrspace(1)* [[PTR]] to i64
; LLPARSER-NEXT: ret i64 [[RET]]
;
; INSTSIMPLIFY-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_array_one_var_2
; INSTSIMPLIFY-SAME: (i64 [[X:%.*]]) {
; INSTSIMPLIFY-NEXT: [[PTR:%.*]] = getelementptr [2 x i16], [2 x i16] addrspace(1)* null, i64 7, i64 [[X]]
; INSTSIMPLIFY-NEXT: [[RET:%.*]] = ptrtoint i16 addrspace(1)* [[PTR]] to i64
; INSTSIMPLIFY-NEXT: ret i64 [[RET]]
;
; INSTCOMBINE-LABEL: define {{[^@]+}}@fold_ptrtoint_nullgep_array_one_var_2
; INSTCOMBINE-SAME: (i64 [[X:%.*]]) {
; INSTCOMBINE-NEXT: [[PTR_IDX:%.*]] = shl i64 [[X]], 1
; INSTCOMBINE-NEXT: [[PTR_OFFS:%.*]] = add i64 [[PTR_IDX]], 28
; INSTCOMBINE-NEXT: ret i64 [[PTR_OFFS]]
;
%ptr = getelementptr [2 x i16], [2 x i16] addrspace(1)* null, i64 7, i64 %x
%ret = ptrtoint i16 addrspace(1)* %ptr to i64
ret i64 %ret
}
define i64 @fold_ptrtoint_nested_array_two_vars(i64 %x, i64 %y) {
; LLPARSER-LABEL: define {{[^@]+}}@fold_ptrtoint_nested_array_two_vars
; LLPARSER-SAME: (i64 [[X:%.*]], i64 [[Y:%.*]]) {
; LLPARSER-NEXT: [[PTR:%.*]] = getelementptr [2 x i16], [2 x i16] addrspace(1)* null, i64 [[X]], i64 [[Y]]
; LLPARSER-NEXT: [[RET:%.*]] = ptrtoint i16 addrspace(1)* [[PTR]] to i64
; LLPARSER-NEXT: ret i64 [[RET]]
;
; INSTSIMPLIFY-LABEL: define {{[^@]+}}@fold_ptrtoint_nested_array_two_vars
; INSTSIMPLIFY-SAME: (i64 [[X:%.*]], i64 [[Y:%.*]]) {
; INSTSIMPLIFY-NEXT: [[PTR:%.*]] = getelementptr [2 x i16], [2 x i16] addrspace(1)* null, i64 [[X]], i64 [[Y]]
; INSTSIMPLIFY-NEXT: [[RET:%.*]] = ptrtoint i16 addrspace(1)* [[PTR]] to i64
; INSTSIMPLIFY-NEXT: ret i64 [[RET]]
;
; INSTCOMBINE-LABEL: define {{[^@]+}}@fold_ptrtoint_nested_array_two_vars
; INSTCOMBINE-SAME: (i64 [[X:%.*]], i64 [[Y:%.*]]) {
; INSTCOMBINE-NEXT: [[PTR_IDX:%.*]] = shl i64 [[X]], 2
; INSTCOMBINE-NEXT: [[PTR_IDX1:%.*]] = shl i64 [[Y]], 1
; INSTCOMBINE-NEXT: [[PTR_OFFS:%.*]] = add i64 [[PTR_IDX]], [[PTR_IDX1]]
; INSTCOMBINE-NEXT: ret i64 [[PTR_OFFS]]
;
%ptr = getelementptr [2 x i16], [2 x i16] addrspace(1)* null, i64 %x, i64 %y
%ret = ptrtoint i16 addrspace(1)* %ptr to i64
ret i64 %ret
}
define i64 @fold_ptrtoint_nested_array_two_vars_plus_zero(i64 %x, i64 %y) {
; LLPARSER-LABEL: define {{[^@]+}}@fold_ptrtoint_nested_array_two_vars_plus_zero
; LLPARSER-SAME: (i64 [[X:%.*]], i64 [[Y:%.*]]) {
; LLPARSER-NEXT: [[PTR:%.*]] = getelementptr [2 x [2 x i16]], [2 x [2 x i16]] addrspace(1)* null, i64 [[X]], i64 [[Y]], i64 0
; LLPARSER-NEXT: [[RET:%.*]] = ptrtoint i16 addrspace(1)* [[PTR]] to i64
; LLPARSER-NEXT: ret i64 [[RET]]
;
; INSTSIMPLIFY-LABEL: define {{[^@]+}}@fold_ptrtoint_nested_array_two_vars_plus_zero
; INSTSIMPLIFY-SAME: (i64 [[X:%.*]], i64 [[Y:%.*]]) {
; INSTSIMPLIFY-NEXT: [[PTR:%.*]] = getelementptr [2 x [2 x i16]], [2 x [2 x i16]] addrspace(1)* null, i64 [[X]], i64 [[Y]], i64 0
; INSTSIMPLIFY-NEXT: [[RET:%.*]] = ptrtoint i16 addrspace(1)* [[PTR]] to i64
; INSTSIMPLIFY-NEXT: ret i64 [[RET]]
;
; INSTCOMBINE-LABEL: define {{[^@]+}}@fold_ptrtoint_nested_array_two_vars_plus_zero
; INSTCOMBINE-SAME: (i64 [[X:%.*]], i64 [[Y:%.*]]) {
; INSTCOMBINE-NEXT: [[PTR_IDX:%.*]] = shl i64 [[X]], 3
; INSTCOMBINE-NEXT: [[PTR_IDX1:%.*]] = shl i64 [[Y]], 2
; INSTCOMBINE-NEXT: [[PTR_OFFS:%.*]] = add i64 [[PTR_IDX]], [[PTR_IDX1]]
; INSTCOMBINE-NEXT: ret i64 [[PTR_OFFS]]
;
%ptr = getelementptr [2 x [2 x i16]], [2 x [2 x i16]] addrspace(1)* null, i64 %x, i64 %y, i64 0
%ret = ptrtoint i16 addrspace(1)* %ptr to i64
ret i64 %ret
}
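; The offset below is 8 * %x + 4 * %y + 2; since the first two terms have their low two
; bits clear, InstCombine can emit the final + 2 as an or.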
define i64 @fold_ptrtoint_nested_array_two_vars_plus_const(i64 %x, i64 %y) {
; LLPARSER-LABEL: define {{[^@]+}}@fold_ptrtoint_nested_array_two_vars_plus_const
; LLPARSER-SAME: (i64 [[X:%.*]], i64 [[Y:%.*]]) {
; LLPARSER-NEXT: [[PTR:%.*]] = getelementptr [2 x [2 x i16]], [2 x [2 x i16]] addrspace(1)* null, i64 [[X]], i64 [[Y]], i64 1
; LLPARSER-NEXT: [[RET:%.*]] = ptrtoint i16 addrspace(1)* [[PTR]] to i64
; LLPARSER-NEXT: ret i64 [[RET]]
;
; INSTSIMPLIFY-LABEL: define {{[^@]+}}@fold_ptrtoint_nested_array_two_vars_plus_const
; INSTSIMPLIFY-SAME: (i64 [[X:%.*]], i64 [[Y:%.*]]) {
; INSTSIMPLIFY-NEXT: [[PTR:%.*]] = getelementptr [2 x [2 x i16]], [2 x [2 x i16]] addrspace(1)* null, i64 [[X]], i64 [[Y]], i64 1
; INSTSIMPLIFY-NEXT: [[RET:%.*]] = ptrtoint i16 addrspace(1)* [[PTR]] to i64
; INSTSIMPLIFY-NEXT: ret i64 [[RET]]
;
; INSTCOMBINE-LABEL: define {{[^@]+}}@fold_ptrtoint_nested_array_two_vars_plus_const
; INSTCOMBINE-SAME: (i64 [[X:%.*]], i64 [[Y:%.*]]) {
; INSTCOMBINE-NEXT: [[PTR_IDX:%.*]] = shl i64 [[X]], 3
; INSTCOMBINE-NEXT: [[PTR_IDX1:%.*]] = shl i64 [[Y]], 2
; INSTCOMBINE-NEXT: [[PTR_OFFS:%.*]] = add i64 [[PTR_IDX]], [[PTR_IDX1]]
; INSTCOMBINE-NEXT: [[PTR_OFFS2:%.*]] = or i64 [[PTR_OFFS]], 2
; INSTCOMBINE-NEXT: ret i64 [[PTR_OFFS2]]
;
%ptr = getelementptr [2 x [2 x i16]], [2 x [2 x i16]] addrspace(1)* null, i64 %x, i64 %y, i64 1
%ret = ptrtoint i16 addrspace(1)* %ptr to i64
ret i64 %ret
}
; Negative test -- should not be folded since there are multiple GEP uses
define i64 @fold_ptrtoint_nested_nullgep_array_variable_multiple_uses(i64 %x, i64 %y) {
; ALL-LABEL: define {{[^@]+}}@fold_ptrtoint_nested_nullgep_array_variable_multiple_uses
; ALL-SAME: (i64 [[X:%.*]], i64 [[Y:%.*]]) {
; ALL-NEXT: [[PTR:%.*]] = getelementptr [2 x i16], [2 x i16] addrspace(1)* null, i64 [[X]], i64 [[Y]]
; ALL-NEXT: [[PTRI8:%.*]] = bitcast i16 addrspace(1)* [[PTR]] to i8 addrspace(1)*
; ALL-NEXT: call void @use_ptr(i8 addrspace(1)* [[PTRI8]])
; ALL-NEXT: [[RET:%.*]] = ptrtoint i16 addrspace(1)* [[PTR]] to i64
; ALL-NEXT: ret i64 [[RET]]
;
%ptr = getelementptr [2 x i16], [2 x i16] addrspace(1)* null, i64 %x, i64 %y
%ptri8 = bitcast i16 addrspace(1)* %ptr to i8 addrspace(1)*
call void @use_ptr(i8 addrspace(1)* %ptri8)
%ret = ptrtoint i16 addrspace(1)* %ptr to i64
ret i64 %ret
}