; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt -S -passes=gvn < %s | FileCheck %s
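
; Check how GVN treats unordered atomic accesses to @w that are bracketed by
; acquire/release/seq_cst operations on the unrelated global @u.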
@u = global i32 5, align 4
@w = global i32 10, align 4
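
; In the first two tests, both unordered loads of @w are kept on either side of
; the seq_cst or acquire load of @u, so the difference does not fold away.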
define i32 @test_load_seq_cst_unordered() {
; CHECK-LABEL: @test_load_seq_cst_unordered(
; CHECK-NEXT:    [[L1:%.*]] = load atomic i32, i32* @w unordered, align 4
; CHECK-NEXT:    [[LV:%.*]] = load atomic i32, i32* @u seq_cst, align 4
; CHECK-NEXT:    [[L2:%.*]] = load atomic i32, i32* @w unordered, align 4
; CHECK-NEXT:    [[RES_1:%.*]] = sub i32 [[L1]], [[L2]]
; CHECK-NEXT:    [[RES:%.*]] = add i32 [[RES_1]], [[LV]]
; CHECK-NEXT:    ret i32 [[RES]]
;
  %l1 = load atomic i32, i32* @w unordered, align 4
  %lv = load atomic i32, i32* @u seq_cst, align 4
  %l2 = load atomic i32, i32* @w unordered, align 4
  %res.1 = sub i32 %l1, %l2
  %res = add i32 %res.1, %lv
  ret i32 %res
}

define i32 @test_load_acquire_unordered() {
; CHECK-LABEL: @test_load_acquire_unordered(
; CHECK-NEXT:    [[L1:%.*]] = load atomic i32, i32* @w unordered, align 4
; CHECK-NEXT:    [[LV:%.*]] = load atomic i32, i32* @u acquire, align 4
; CHECK-NEXT:    [[L2:%.*]] = load atomic i32, i32* @w unordered, align 4
; CHECK-NEXT:    [[RES_1:%.*]] = sub i32 [[L1]], [[L2]]
; CHECK-NEXT:    [[RES:%.*]] = add i32 [[RES_1]], [[LV]]
; CHECK-NEXT:    ret i32 [[RES]]
;
  %l1 = load atomic i32, i32* @w unordered, align 4
  %lv = load atomic i32, i32* @u acquire, align 4
  %l2 = load atomic i32, i32* @w unordered, align 4
  %res.1 = sub i32 %l1, %l2
  %res = add i32 %res.1, %lv
  ret i32 %res
}
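
; In the next two tests the unordered loads of @w around the ordered store to
; @u are eliminated, so the returned difference folds to 0.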
define i32 @test_store_cst_unordered(i32 %x) {
; CHECK-LABEL: @test_store_cst_unordered(
; CHECK-NEXT:    store atomic i32 [[X:%.*]], i32* @u seq_cst, align 4
; CHECK-NEXT:    ret i32 0
;
  %l1 = load atomic i32, i32* @w unordered, align 4
  store atomic i32 %x, i32* @u seq_cst, align 4
  %l2 = load atomic i32, i32* @w unordered, align 4
  %res = sub i32 %l1, %l2
  ret i32 %res
}

define i32 @test_store_release_unordered(i32 %x) {
; CHECK-LABEL: @test_store_release_unordered(
; CHECK-NEXT:    store atomic i32 [[X:%.*]], i32* @u release, align 4
; CHECK-NEXT:    ret i32 0
;
  %l1 = load atomic i32, i32* @w unordered, align 4
  store atomic i32 %x, i32* @u release, align 4
  %l2 = load atomic i32, i32* @w unordered, align 4
  %res = sub i32 %l1, %l2
  ret i32 %res
}
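
; In the next two tests none of the unordered stores to @w are removed across
; the ordered store to @u.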
define i32 @test_stores_seq_cst_unordered(i32 %x) {
; CHECK-LABEL: @test_stores_seq_cst_unordered(
; CHECK-NEXT:    store atomic i32 [[X:%.*]], i32* @w unordered, align 4
; CHECK-NEXT:    store atomic i32 [[X]], i32* @u seq_cst, align 4
; CHECK-NEXT:    store atomic i32 0, i32* @w unordered, align 4
; CHECK-NEXT:    ret i32 0
;
  store atomic i32 %x, i32* @w unordered, align 4
  store atomic i32 %x, i32* @u seq_cst, align 4
  store atomic i32 0, i32* @w unordered, align 4
  ret i32 0
}

define i32 @test_stores_release_unordered(i32 %x) {
; CHECK-LABEL: @test_stores_release_unordered(
; CHECK-NEXT:    store atomic i32 [[X:%.*]], i32* @w unordered, align 4
; CHECK-NEXT:    store atomic i32 [[X]], i32* @u release, align 4
; CHECK-NEXT:    store atomic i32 0, i32* @w unordered, align 4
; CHECK-NEXT:    ret i32 0
;
  store atomic i32 %x, i32* @w unordered, align 4
  store atomic i32 %x, i32* @u release, align 4
  store atomic i32 0, i32* @w unordered, align 4
  ret i32 0
}

; Must respect total order for seq_cst even for unrelated addresses
define i32 @neg_load_seq_cst() {
; CHECK-LABEL: @neg_load_seq_cst(
; CHECK-NEXT:    [[L1:%.*]] = load atomic i32, i32* @w seq_cst, align 4
; CHECK-NEXT:    [[LV:%.*]] = load atomic i32, i32* @u seq_cst, align 4
; CHECK-NEXT:    [[L2:%.*]] = load atomic i32, i32* @w seq_cst, align 4
; CHECK-NEXT:    [[RES_1:%.*]] = sub i32 [[L1]], [[L2]]
; CHECK-NEXT:    [[RES:%.*]] = add i32 [[RES_1]], [[LV]]
; CHECK-NEXT:    ret i32 [[RES]]
;
  %l1 = load atomic i32, i32* @w seq_cst, align 4
  %lv = load atomic i32, i32* @u seq_cst, align 4
  %l2 = load atomic i32, i32* @w seq_cst, align 4
  %res.1 = sub i32 %l1, %l2
  %res = add i32 %res.1, %lv
  ret i32 %res
}

define i32 @neg_store_seq_cst(i32 %x) {
; CHECK-LABEL: @neg_store_seq_cst(
; CHECK-NEXT:    [[L1:%.*]] = load atomic i32, i32* @w seq_cst, align 4
; CHECK-NEXT:    store atomic i32 [[X:%.*]], i32* @u seq_cst, align 4
; CHECK-NEXT:    [[L2:%.*]] = load atomic i32, i32* @w seq_cst, align 4
; CHECK-NEXT:    [[RES:%.*]] = sub i32 [[L1]], [[L2]]
; CHECK-NEXT:    ret i32 [[RES]]
;
  %l1 = load atomic i32, i32* @w seq_cst, align 4
  store atomic i32 %x, i32* @u seq_cst, align 4
  %l2 = load atomic i32, i32* @w seq_cst, align 4
  %res = sub i32 %l1, %l2
  ret i32 %res
}

define i32 @neg_stores_seq_cst(i32 %x) {
; CHECK-LABEL: @neg_stores_seq_cst(
; CHECK-NEXT:    store atomic i32 [[X:%.*]], i32* @w seq_cst, align 4
; CHECK-NEXT:    store atomic i32 [[X]], i32* @u seq_cst, align 4
; CHECK-NEXT:    store atomic i32 0, i32* @w seq_cst, align 4
; CHECK-NEXT:    ret i32 0
;
  store atomic i32 %x, i32* @w seq_cst, align 4
  store atomic i32 %x, i32* @u seq_cst, align 4
  store atomic i32 0, i32* @w seq_cst, align 4
  ret i32 0
}