|
@@ -1,9 +1,8 @@
 ; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
 ; RUN: opt < %s -basic-aa -dse -S | FileCheck %s
-; RUN: opt < %s -aa-pipeline=basic-aa -passes=dse -S | FileCheck %s
+; RUN: opt < %s -aa-pipeline=basic-aa -passes='dse,verify<memoryssa>' -S | FileCheck %s
 target datalayout = "E-p:64:64:64-a0:0:8-f32:32:32-f64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:32:64-v64:64:64-v128:128:128"
 
-declare i8* @calloc(i64, i64)
 declare void @memset_pattern16(i8*, i8*, i64)
 
 declare void @llvm.memset.p0i8.i64(i8* nocapture, i8, i64, i1) nounwind
@@ -309,6 +308,164 @@ entry:
   ret void
 }
 
+declare noalias i8* @malloc(i64)
+declare noalias i8* @_Znwm(i64)
+declare void @clobber_memory(float*)
+
+; based on pr25892_lite
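+; The zero memset covers the entire malloc'ed region, so malloc + memset is folded into a single calloc.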
+define i8* @zero_memset_after_malloc(i64 %size) {
+; CHECK-LABEL: @zero_memset_after_malloc(
+; CHECK-NEXT: [[CALL:%.*]] = call i8* @calloc(i64 1, i64 [[SIZE:%.*]])
+; CHECK-NEXT: ret i8* [[CALL]]
+;
+  %call = call i8* @malloc(i64 %size) inaccessiblememonly
+  call void @llvm.memset.p0i8.i64(i8* %call, i8 0, i64 %size, i1 false)
+  ret i8* %call
+}
+
+; based on pr25892_lite
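+; @clobber_memory may write to the buffer between the malloc and the memset, so no calloc is formed here.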
+define i8* @zero_memset_after_malloc_with_intermediate_clobbering(i64 %size) {
+; CHECK-LABEL: @zero_memset_after_malloc_with_intermediate_clobbering(
+; CHECK-NEXT: [[CALL:%.*]] = call i8* @malloc(i64 [[SIZE:%.*]])
+; CHECK-NEXT: [[BC:%.*]] = bitcast i8* [[CALL]] to float*
+; CHECK-NEXT: call void @clobber_memory(float* [[BC]])
+; CHECK-NEXT: call void @llvm.memset.p0i8.i64(i8* [[CALL]], i8 0, i64 [[SIZE]], i1 false)
+; CHECK-NEXT: ret i8* [[CALL]]
+;
+  %call = call i8* @malloc(i64 %size) inaccessiblememonly
+  %bc = bitcast i8* %call to float*
+  call void @clobber_memory(float* %bc)
+  call void @llvm.memset.p0i8.i64(i8* %call, i8 0, i64 %size, i1 false)
+  ret i8* %call
+}
+
+; based on pr25892_lite
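+; The memset length (%size - 1) does not cover the entire allocation, so no calloc is formed.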
+define i8* @zero_memset_after_malloc_with_different_sizes(i64 %size) {
+; CHECK-LABEL: @zero_memset_after_malloc_with_different_sizes(
+; CHECK-NEXT: [[CALL:%.*]] = call i8* @malloc(i64 [[SIZE:%.*]])
+; CHECK-NEXT: [[SIZE2:%.*]] = add nsw i64 [[SIZE]], -1
+; CHECK-NEXT: call void @llvm.memset.p0i8.i64(i8* [[CALL]], i8 0, i64 [[SIZE2]], i1 false)
+; CHECK-NEXT: ret i8* [[CALL]]
+;
+  %call = call i8* @malloc(i64 %size) inaccessiblememonly
+  %size2 = add nsw i64 %size, -1
+  call void @llvm.memset.p0i8.i64(i8* %call, i8 0, i64 %size2, i1 false)
+  ret i8* %call
+}
+
+; based on pr25892_lite
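+; The buffer is allocated with operator new (_Znwm), not malloc, so the memset is kept.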
+define i8* @zero_memset_after_new(i64 %size) {
+; CHECK-LABEL: @zero_memset_after_new(
+; CHECK-NEXT: [[CALL:%.*]] = call i8* @_Znwm(i64 [[SIZE:%.*]])
+; CHECK-NEXT: call void @llvm.memset.p0i8.i64(i8* [[CALL]], i8 0, i64 [[SIZE]], i1 false)
+; CHECK-NEXT: ret i8* [[CALL]]
+;
+  %call = call i8* @_Znwm(i64 %size)
+  call void @llvm.memset.p0i8.i64(i8* %call, i8 0, i64 %size, i1 false)
+  ret i8* %call
+}
+
+; This should not create a calloc and should not crash the compiler.
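+; The allocation function is only known indirectly, so it must not be treated as malloc.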
+define i8* @notmalloc_memset(i64 %size, i8*(i64)* %notmalloc) {
+; CHECK-LABEL: @notmalloc_memset(
+; CHECK-NEXT: [[CALL1:%.*]] = call i8* [[NOTMALLOC:%.*]](i64 [[SIZE:%.*]])
+; CHECK-NEXT: call void @llvm.memset.p0i8.i64(i8* [[CALL1]], i8 0, i64 [[SIZE]], i1 false)
+; CHECK-NEXT: ret i8* [[CALL1]]
+;
+  %call1 = call i8* %notmalloc(i64 %size)
+  call void @llvm.memset.p0i8.i64(i8* %call1, i8 0, i64 %size, i1 false)
+  ret i8* %call1
+}
+
+; This should not create a recursive call to calloc.
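+; This module defines @calloc in terms of malloc + memset, so folding would make @calloc call itself.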
+define i8* @calloc(i64 %nmemb, i64 %size) {
+; CHECK-LABEL: @calloc(
+; CHECK: entry:
+; CHECK-NEXT: [[MUL:%.*]] = mul i64 [[SIZE:%.*]], [[NMEMB:%.*]]
+; CHECK-NEXT: [[CALL:%.*]] = tail call noalias align 16 i8* @malloc(i64 [[MUL]])
+; CHECK-NEXT: [[TOBOOL_NOT:%.*]] = icmp eq i8* [[CALL]], null
+; CHECK-NEXT: br i1 [[TOBOOL_NOT]], label [[IF_END:%.*]], label [[IF_THEN:%.*]]
+; CHECK: if.then:
+; CHECK-NEXT: tail call void @llvm.memset.p0i8.i64(i8* nonnull align 16 [[CALL]], i8 0, i64 [[MUL]], i1 false)
+; CHECK-NEXT: br label [[IF_END]]
+; CHECK: if.end:
+; CHECK-NEXT: ret i8* [[CALL]]
+;
+entry:
+  %mul = mul i64 %size, %nmemb
+  %call = tail call noalias align 16 i8* @malloc(i64 %mul)
+  %tobool.not = icmp eq i8* %call, null
+  br i1 %tobool.not, label %if.end, label %if.then
+
+if.then: ; preds = %entry
+  tail call void @llvm.memset.p0i8.i64(i8* nonnull align 16 %call, i8 0, i64 %mul, i1 false)
+  br label %if.end
+
+if.end: ; preds = %if.then, %entry
+  ret i8* %call
+}
+
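+; Based on PR25892: the null check between the malloc and the memset does not block the fold to calloc.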
+define float* @pr25892(i64 %size) {
+; CHECK-LABEL: @pr25892(
+; CHECK: entry:
+; CHECK-NEXT: [[CALL:%.*]] = call i8* @calloc(i64 1, i64 [[SIZE:%.*]])
+; CHECK-NEXT: [[CMP:%.*]] = icmp eq i8* [[CALL]], null
+; CHECK-NEXT: br i1 [[CMP]], label [[CLEANUP:%.*]], label [[IF_END:%.*]]
+; CHECK: if.end:
+; CHECK-NEXT: [[BC:%.*]] = bitcast i8* [[CALL]] to float*
+; CHECK-NEXT: br label [[CLEANUP]]
+; CHECK: cleanup:
+; CHECK-NEXT: [[RETVAL_0:%.*]] = phi float* [ [[BC]], [[IF_END]] ], [ null, [[ENTRY:%.*]] ]
+; CHECK-NEXT: ret float* [[RETVAL_0]]
+;
+entry:
+  %call = call i8* @malloc(i64 %size) inaccessiblememonly
+  %cmp = icmp eq i8* %call, null
+  br i1 %cmp, label %cleanup, label %if.end
+if.end:
+  %bc = bitcast i8* %call to float*
+  call void @llvm.memset.p0i8.i64(i8* %call, i8 0, i64 %size, i1 false)
+  br label %cleanup
+cleanup:
+  %retval.0 = phi float* [ %bc, %if.end ], [ null, %entry ]
+  ret float* %retval.0
+}
+
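+; As in @pr25892, but the extra zero store is also made dead by the calloc and is removed.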
+define float* @pr25892_with_extra_store(i64 %size) {
+; CHECK-LABEL: @pr25892_with_extra_store(
+; CHECK: entry:
+; CHECK-NEXT: [[CALL:%.*]] = call i8* @calloc(i64 1, i64 [[SIZE:%.*]])
+; CHECK-NEXT: [[CMP:%.*]] = icmp eq i8* [[CALL]], null
+; CHECK-NEXT: br i1 [[CMP]], label [[CLEANUP:%.*]], label [[IF_END:%.*]]
+; CHECK: if.end:
+; CHECK-NEXT: [[BC:%.*]] = bitcast i8* [[CALL]] to float*
+; CHECK-NEXT: br label [[CLEANUP]]
+; CHECK: cleanup:
+; CHECK-NEXT: [[RETVAL_0:%.*]] = phi float* [ [[BC]], [[IF_END]] ], [ null, [[ENTRY:%.*]] ]
+; CHECK-NEXT: ret float* [[RETVAL_0]]
+;
+entry:
+  %call = call i8* @malloc(i64 %size) inaccessiblememonly
+  %cmp = icmp eq i8* %call, null
+  br i1 %cmp, label %cleanup, label %if.end
+if.end:
+  %bc = bitcast i8* %call to float*
+  call void @llvm.memset.p0i8.i64(i8* %call, i8 0, i64 %size, i1 false)
+  store i8 0, i8* %call, align 1
+  br label %cleanup
+cleanup:
+  %retval.0 = phi float* [ %bc, %if.end ], [ null, %entry ]
+  ret float* %retval.0
+}
+
 ; PR50143
 define i8* @store_zero_after_calloc_inaccessiblememonly() {
 ; CHECK-LABEL: @store_zero_after_calloc_inaccessiblememonly(
|