You've already forked linux-packaging-mono
							
							
		
			
				
	
	
		
			146 lines
		
	
	
		
			4.9 KiB
		
	
	
	
		
			LLVM
		
	
	
	
	
	
			
		
		
	
	
			146 lines
		
	
	
		
			4.9 KiB
		
	
	
	
		
			LLVM
		
	
	
	
	
	
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512bw | FileCheck %s

; _mm512_test_epi16_mask: (A & B) != 0 per i16 lane should select VPTESTMW
; and return the 32-bit lane mask in a GPR (via kmovd).
; Function Attrs: norecurse nounwind readnone
define zeroext i32 @TEST_mm512_test_epi16_mask(<8 x i64> %__A, <8 x i64> %__B) local_unnamed_addr #0 {
; CHECK-LABEL: TEST_mm512_test_epi16_mask:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vptestmw %zmm0, %zmm1, %k0
; CHECK-NEXT:    kmovd %k0, %eax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
entry:
  %and.i.i = and <8 x i64> %__B, %__A
  %0 = bitcast <8 x i64> %and.i.i to <32 x i16>
  %1 = icmp ne <32 x i16> %0, zeroinitializer
  %2 = bitcast <32 x i1> %1 to i32
  ret i32 %2
}
| 
 | |
| 
 | |
; _mm512_test_epi8_mask: (A & B) != 0 per i8 lane should select VPTESTMB
; and return the 64-bit lane mask in a GPR (via kmovq).
; Function Attrs: norecurse nounwind readnone
define zeroext i64 @TEST_mm512_test_epi8_mask(<8 x i64> %__A, <8 x i64> %__B) local_unnamed_addr #0 {
; CHECK-LABEL: TEST_mm512_test_epi8_mask:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vptestmb %zmm0, %zmm1, %k0
; CHECK-NEXT:    kmovq %k0, %rax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
entry:
  %and.i.i = and <8 x i64> %__B, %__A
  %0 = bitcast <8 x i64> %and.i.i to <64 x i8>
  %1 = icmp ne <64 x i8> %0, zeroinitializer
  %2 = bitcast <64 x i1> %1 to i64
  ret i64 %2
}
| 
 | |
; _mm512_mask_test_epi16_mask: as above, but the i32 mask %__U should be
; folded into the instruction as a zeroing write-mask ({%k1}) rather than
; applied with a separate AND.
; Function Attrs: norecurse nounwind readnone
define zeroext i32 @TEST_mm512_mask_test_epi16_mask(i32 %__U, <8 x i64> %__A, <8 x i64> %__B) local_unnamed_addr #0 {
; CHECK-LABEL: TEST_mm512_mask_test_epi16_mask:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    kmovd %edi, %k1
; CHECK-NEXT:    vptestmw %zmm0, %zmm1, %k0 {%k1}
; CHECK-NEXT:    kmovd %k0, %eax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
entry:
  %and.i.i = and <8 x i64> %__B, %__A
  %0 = bitcast <8 x i64> %and.i.i to <32 x i16>
  %1 = icmp ne <32 x i16> %0, zeroinitializer
  %2 = bitcast i32 %__U to <32 x i1>
  %3 = and <32 x i1> %1, %2
  %4 = bitcast <32 x i1> %3 to i32
  ret i32 %4
}
| 
 | |
; _mm512_mask_test_epi8_mask: byte-lane variant of the masked test; the i64
; mask %__U should become a {%k1} write-mask on VPTESTMB.
; Function Attrs: norecurse nounwind readnone
define zeroext i64 @TEST_mm512_mask_test_epi8_mask(i64 %__U, <8 x i64> %__A, <8 x i64> %__B) local_unnamed_addr #0 {
; CHECK-LABEL: TEST_mm512_mask_test_epi8_mask:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    kmovq %rdi, %k1
; CHECK-NEXT:    vptestmb %zmm0, %zmm1, %k0 {%k1}
; CHECK-NEXT:    kmovq %k0, %rax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
entry:
  %and.i.i = and <8 x i64> %__B, %__A
  %0 = bitcast <8 x i64> %and.i.i to <64 x i8>
  %1 = icmp ne <64 x i8> %0, zeroinitializer
  %2 = bitcast i64 %__U to <64 x i1>
  %3 = and <64 x i1> %1, %2
  %4 = bitcast <64 x i1> %3 to i64
  ret i64 %4
}
| 
 | |
; _mm512_testn_epi16_mask: (A & B) == 0 per i16 lane (icmp eq) should select
; the negated-test form VPTESTNMW.
; Function Attrs: norecurse nounwind readnone
define zeroext i32 @TEST_mm512_testn_epi16_mask(<8 x i64> %__A, <8 x i64> %__B) local_unnamed_addr #0 {
; CHECK-LABEL: TEST_mm512_testn_epi16_mask:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vptestnmw %zmm0, %zmm1, %k0
; CHECK-NEXT:    kmovd %k0, %eax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
entry:
  %and.i.i = and <8 x i64> %__B, %__A
  %0 = bitcast <8 x i64> %and.i.i to <32 x i16>
  %1 = icmp eq <32 x i16> %0, zeroinitializer
  %2 = bitcast <32 x i1> %1 to i32
  ret i32 %2
}
| 
 | |
| 
 | |
; _mm512_testn_epi8_mask: (A & B) == 0 per i8 lane should select VPTESTNMB
; and return the 64-bit lane mask via kmovq.
; Function Attrs: norecurse nounwind readnone
define zeroext i64 @TEST_mm512_testn_epi8_mask(<8 x i64> %__A, <8 x i64> %__B) local_unnamed_addr #0 {
; CHECK-LABEL: TEST_mm512_testn_epi8_mask:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vptestnmb %zmm0, %zmm1, %k0
; CHECK-NEXT:    kmovq %k0, %rax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
entry:
  %and.i.i = and <8 x i64> %__B, %__A
  %0 = bitcast <8 x i64> %and.i.i to <64 x i8>
  %1 = icmp eq <64 x i8> %0, zeroinitializer
  %2 = bitcast <64 x i1> %1 to i64
  ret i64 %2
}
| 
 | |
; _mm512_mask_testn_epi16_mask: negated test with the i32 mask %__U folded
; into VPTESTNMW as a {%k1} write-mask.
; Function Attrs: norecurse nounwind readnone
define zeroext i32 @TEST_mm512_mask_testn_epi16_mask(i32 %__U, <8 x i64> %__A, <8 x i64> %__B) local_unnamed_addr #0 {
; CHECK-LABEL: TEST_mm512_mask_testn_epi16_mask:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    kmovd %edi, %k1
; CHECK-NEXT:    vptestnmw %zmm0, %zmm1, %k0 {%k1}
; CHECK-NEXT:    kmovd %k0, %eax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
entry:
  %and.i.i = and <8 x i64> %__B, %__A
  %0 = bitcast <8 x i64> %and.i.i to <32 x i16>
  %1 = icmp eq <32 x i16> %0, zeroinitializer
  %2 = bitcast i32 %__U to <32 x i1>
  %3 = and <32 x i1> %1, %2
  %4 = bitcast <32 x i1> %3 to i32
  ret i32 %4
}
| 
 | |
; _mm512_mask_testn_epi8_mask: byte-lane negated test with the i64 mask %__U
; folded into VPTESTNMB as a {%k1} write-mask.
; Function Attrs: norecurse nounwind readnone
define zeroext i64 @TEST_mm512_mask_testn_epi8_mask(i64 %__U, <8 x i64> %__A, <8 x i64> %__B) local_unnamed_addr #0 {
; CHECK-LABEL: TEST_mm512_mask_testn_epi8_mask:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    kmovq %rdi, %k1
; CHECK-NEXT:    vptestnmb %zmm0, %zmm1, %k0 {%k1}
; CHECK-NEXT:    kmovq %k0, %rax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
entry:
  %and.i.i = and <8 x i64> %__B, %__A
  %0 = bitcast <8 x i64> %and.i.i to <64 x i8>
  %1 = icmp eq <64 x i8> %0, zeroinitializer
  %2 = bitcast i64 %__U to <64 x i1>
  %3 = and <64 x i1> %1, %2
  %4 = bitcast <64 x i1> %3 to i64
  ret i64 %4
}
| 
 |