// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 5
// RUN: %clang_cc1 -triple x86_64-unknown-unknown -emit-llvm -o - %s | FileCheck %s --check-prefix=X86_64
// RUN: %clang_cc1 -triple i386-unknown-unknown -emit-llvm -o - %s | FileCheck %s --check-prefix=I386
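// __builtin_counted_by_ref(FAM) evaluates to a pointer to the flexible array
// member's counted_by field; when the FAM has no counted_by attribute it
// evaluates to a null pointer of type 'void *'.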
struct a {
  char x;
  short count;
  int array[] __attribute__((counted_by(count)));
};
// X86_64-LABEL: define dso_local ptr @test1(
// X86_64-SAME: i32 noundef [[SIZE:%.*]]) #[[ATTR0:[0-9]+]] {
// X86_64-NEXT: [[ENTRY:.*:]]
// X86_64-NEXT: [[SIZE_ADDR:%.*]] = alloca i32, align 4
// X86_64-NEXT: [[P:%.*]] = alloca ptr, align 8
// X86_64-NEXT: store i32 [[SIZE]], ptr [[SIZE_ADDR]], align 4
// X86_64-NEXT: [[TMP0:%.*]] = load i32, ptr [[SIZE_ADDR]], align 4
// X86_64-NEXT: [[CONV:%.*]] = sext i32 [[TMP0]] to i64
// X86_64-NEXT: [[MUL:%.*]] = mul i64 4, [[CONV]]
// X86_64-NEXT: [[ADD:%.*]] = add i64 4, [[MUL]]
// X86_64-NEXT: [[CALL:%.*]] = call ptr @malloc(i64 noundef [[ADD]]) #[[ATTR2:[0-9]+]]
// X86_64-NEXT: store ptr [[CALL]], ptr [[P]], align 8
// X86_64-NEXT: [[TMP1:%.*]] = load i32, ptr [[SIZE_ADDR]], align 4
// X86_64-NEXT: [[CONV1:%.*]] = trunc i32 [[TMP1]] to i16
// X86_64-NEXT: [[TMP2:%.*]] = load ptr, ptr [[P]], align 8
// X86_64-NEXT: [[DOT_COUNTED_BY_GEP:%.*]] = getelementptr inbounds [[STRUCT_A:%.*]], ptr [[TMP2]], i32 0, i32 1
// X86_64-NEXT: store i16 [[CONV1]], ptr [[DOT_COUNTED_BY_GEP]], align 2
// X86_64-NEXT: [[TMP3:%.*]] = load ptr, ptr [[P]], align 8
// X86_64-NEXT: ret ptr [[TMP3]]
//
// I386-LABEL: define dso_local ptr @test1(
// I386-SAME: i32 noundef [[SIZE:%.*]]) #[[ATTR0:[0-9]+]] {
// I386-NEXT: [[ENTRY:.*:]]
// I386-NEXT: [[SIZE_ADDR:%.*]] = alloca i32, align 4
// I386-NEXT: [[P:%.*]] = alloca ptr, align 4
// I386-NEXT: store i32 [[SIZE]], ptr [[SIZE_ADDR]], align 4
// I386-NEXT: [[TMP0:%.*]] = load i32, ptr [[SIZE_ADDR]], align 4
// I386-NEXT: [[MUL:%.*]] = mul i32 4, [[TMP0]]
// I386-NEXT: [[ADD:%.*]] = add i32 4, [[MUL]]
// I386-NEXT: [[CALL:%.*]] = call ptr @malloc(i32 noundef [[ADD]]) #[[ATTR2:[0-9]+]]
// I386-NEXT: store ptr [[CALL]], ptr [[P]], align 4
// I386-NEXT: [[TMP1:%.*]] = load i32, ptr [[SIZE_ADDR]], align 4
// I386-NEXT: [[CONV:%.*]] = trunc i32 [[TMP1]] to i16
// I386-NEXT: [[TMP2:%.*]] = load ptr, ptr [[P]], align 4
// I386-NEXT: [[DOT_COUNTED_BY_GEP:%.*]] = getelementptr inbounds [[STRUCT_A:%.*]], ptr [[TMP2]], i32 0, i32 1
// I386-NEXT: store i16 [[CONV]], ptr [[DOT_COUNTED_BY_GEP]], align 2
// I386-NEXT: [[TMP3:%.*]] = load ptr, ptr [[P]], align 4
// I386-NEXT: ret ptr [[TMP3]]
//
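// test1: the count field is a direct sibling of the flexible array member, so
// the builtin's result points at 'count' and the store truncates 'size' to i16.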
struct a *test1(int size) {
  struct a *p = __builtin_malloc(sizeof(struct a) + sizeof(int) * size);
  *__builtin_counted_by_ref(p->array) = size;
  return p;
}
struct b {
  int _filler;
  struct {
    int __filler;
    struct {
      int ___filler;
      struct {
        char count;
      };
    };
  };
  struct {
    int filler_;
    struct {
      int filler__;
      struct {
        long array[] __attribute__((counted_by(count)));
      };
    };
  };
};
// X86_64-LABEL: define dso_local ptr @test2(
// X86_64-SAME: i32 noundef [[SIZE:%.*]]) #[[ATTR0]] {
// X86_64-NEXT: [[ENTRY:.*:]]
// X86_64-NEXT: [[SIZE_ADDR:%.*]] = alloca i32, align 4
// X86_64-NEXT: [[P:%.*]] = alloca ptr, align 8
// X86_64-NEXT: store i32 [[SIZE]], ptr [[SIZE_ADDR]], align 4
// X86_64-NEXT: [[TMP0:%.*]] = load i32, ptr [[SIZE_ADDR]], align 4
// X86_64-NEXT: [[CONV:%.*]] = sext i32 [[TMP0]] to i64
// X86_64-NEXT: [[MUL:%.*]] = mul i64 4, [[CONV]]
// X86_64-NEXT: [[ADD:%.*]] = add i64 4, [[MUL]]
// X86_64-NEXT: [[CALL:%.*]] = call ptr @malloc(i64 noundef [[ADD]]) #[[ATTR2]]
// X86_64-NEXT: store ptr [[CALL]], ptr [[P]], align 8
// X86_64-NEXT: [[TMP1:%.*]] = load i32, ptr [[SIZE_ADDR]], align 4
// X86_64-NEXT: [[CONV1:%.*]] = trunc i32 [[TMP1]] to i8
// X86_64-NEXT: [[TMP2:%.*]] = load ptr, ptr [[P]], align 8
// X86_64-NEXT: [[DOT_COUNTED_BY_GEP:%.*]] = getelementptr inbounds [[STRUCT_B:%.*]], ptr [[TMP2]], i32 0, i32 1, i32 1, i32 1, i32 0
// X86_64-NEXT: store i8 [[CONV1]], ptr [[DOT_COUNTED_BY_GEP]], align 1
// X86_64-NEXT: [[TMP3:%.*]] = load ptr, ptr [[P]], align 8
// X86_64-NEXT: ret ptr [[TMP3]]
//
// I386-LABEL: define dso_local ptr @test2(
// I386-SAME: i32 noundef [[SIZE:%.*]]) #[[ATTR0]] {
// I386-NEXT: [[ENTRY:.*:]]
// I386-NEXT: [[SIZE_ADDR:%.*]] = alloca i32, align 4
// I386-NEXT: [[P:%.*]] = alloca ptr, align 4
// I386-NEXT: store i32 [[SIZE]], ptr [[SIZE_ADDR]], align 4
// I386-NEXT: [[TMP0:%.*]] = load i32, ptr [[SIZE_ADDR]], align 4
// I386-NEXT: [[MUL:%.*]] = mul i32 4, [[TMP0]]
// I386-NEXT: [[ADD:%.*]] = add i32 4, [[MUL]]
// I386-NEXT: [[CALL:%.*]] = call ptr @malloc(i32 noundef [[ADD]]) #[[ATTR2]]
// I386-NEXT: store ptr [[CALL]], ptr [[P]], align 4
// I386-NEXT: [[TMP1:%.*]] = load i32, ptr [[SIZE_ADDR]], align 4
// I386-NEXT: [[CONV:%.*]] = trunc i32 [[TMP1]] to i8
// I386-NEXT: [[TMP2:%.*]] = load ptr, ptr [[P]], align 4
// I386-NEXT: [[DOT_COUNTED_BY_GEP:%.*]] = getelementptr inbounds [[STRUCT_B:%.*]], ptr [[TMP2]], i32 0, i32 1, i32 1, i32 1, i32 0
// I386-NEXT: store i8 [[CONV]], ptr [[DOT_COUNTED_BY_GEP]], align 1
// I386-NEXT: [[TMP3:%.*]] = load ptr, ptr [[P]], align 4
// I386-NEXT: ret ptr [[TMP3]]
//
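// test2: both the count field and the flexible array member live inside nested
// anonymous structs; the builtin still resolves to 'count', reached through the
// nested GEP, and the store truncates 'size' to i8.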
struct b *test2(int size) {
  struct b *p = __builtin_malloc(sizeof(struct a) + sizeof(int) * size);
  *__builtin_counted_by_ref(p->array) = size;
  return p;
}
struct c {
  char x;
  short count;
  int array[];
};
// X86_64-LABEL: define dso_local ptr @test3(
// X86_64-SAME: i32 noundef [[SIZE:%.*]]) #[[ATTR0]] {
// X86_64-NEXT: [[ENTRY:.*:]]
// X86_64-NEXT: [[SIZE_ADDR:%.*]] = alloca i32, align 4
// X86_64-NEXT: [[P:%.*]] = alloca ptr, align 8
// X86_64-NEXT: [[__IGNORED:%.*]] = alloca i64, align 8
// X86_64-NEXT: store i32 [[SIZE]], ptr [[SIZE_ADDR]], align 4
// X86_64-NEXT: [[TMP0:%.*]] = load i32, ptr [[SIZE_ADDR]], align 4
// X86_64-NEXT: [[CONV:%.*]] = sext i32 [[TMP0]] to i64
// X86_64-NEXT: [[MUL:%.*]] = mul i64 4, [[CONV]]
// X86_64-NEXT: [[ADD:%.*]] = add i64 4, [[MUL]]
// X86_64-NEXT: [[CALL:%.*]] = call ptr @malloc(i64 noundef [[ADD]]) #[[ATTR2]]
// X86_64-NEXT: store ptr [[CALL]], ptr [[P]], align 8
// X86_64-NEXT: [[TMP1:%.*]] = load i32, ptr [[SIZE_ADDR]], align 4
// X86_64-NEXT: [[CONV1:%.*]] = sext i32 [[TMP1]] to i64
// X86_64-NEXT: store i64 [[CONV1]], ptr [[__IGNORED]], align 8
// X86_64-NEXT: [[TMP2:%.*]] = load ptr, ptr [[P]], align 8
// X86_64-NEXT: ret ptr [[TMP2]]
//
// I386-LABEL: define dso_local ptr @test3(
// I386-SAME: i32 noundef [[SIZE:%.*]]) #[[ATTR0]] {
// I386-NEXT: [[ENTRY:.*:]]
// I386-NEXT: [[SIZE_ADDR:%.*]] = alloca i32, align 4
// I386-NEXT: [[P:%.*]] = alloca ptr, align 4
// I386-NEXT: [[__IGNORED:%.*]] = alloca i32, align 4
// I386-NEXT: store i32 [[SIZE]], ptr [[SIZE_ADDR]], align 4
// I386-NEXT: [[TMP0:%.*]] = load i32, ptr [[SIZE_ADDR]], align 4
// I386-NEXT: [[MUL:%.*]] = mul i32 4, [[TMP0]]
// I386-NEXT: [[ADD:%.*]] = add i32 4, [[MUL]]
// I386-NEXT: [[CALL:%.*]] = call ptr @malloc(i32 noundef [[ADD]]) #[[ATTR2]]
// I386-NEXT: store ptr [[CALL]], ptr [[P]], align 4
// I386-NEXT: [[TMP1:%.*]] = load i32, ptr [[SIZE_ADDR]], align 4
// I386-NEXT: store i32 [[TMP1]], ptr [[__IGNORED]], align 4
// I386-NEXT: [[TMP2:%.*]] = load ptr, ptr [[P]], align 4
// I386-NEXT: ret ptr [[TMP2]]
//
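// test3: struct c's flexible array member has no counted_by attribute, so the
// builtin yields a null 'void *'. The _Generic therefore selects '&__ignored'
// and the assignment only writes the local, leaving the allocation untouched.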
struct c *test3(int size) {
  struct c *p = __builtin_malloc(sizeof(struct c) + sizeof(int) * size);
  unsigned long int __ignored;
  *_Generic(
      __builtin_counted_by_ref(p->array),
      void *: &__ignored,
      default: __builtin_counted_by_ref(p->array)) = size;
  return p;
}