diff --git a/cranelift/filetests/filetests/isa/x64/bmi2.clif b/cranelift/filetests/filetests/isa/x64/bmi2.clif
index 2d145fe531a8..f97a233bdbf3 100644
--- a/cranelift/filetests/filetests/isa/x64/bmi2.clif
+++ b/cranelift/filetests/filetests/isa/x64/bmi2.clif
@@ -269,7 +269,7 @@ block0(v0: i32, v1: i32):
 ; pushq %rbp
 ; movq %rsp, %rbp
 ; block0:
-; andl %esi, $31, %esi
+; andl $0x1f, %esi
 ; bzhi %edi, %esi, %eax
 ; movq %rbp, %rsp
 ; popq %rbp
@@ -299,7 +299,7 @@ block0(v0: i64, v1: i64):
 ; pushq %rbp
 ; movq %rsp, %rbp
 ; block0:
-; andq %rsi, $63, %rsi
+; andq $0x3f, %rsi
 ; bzhi %rdi, %rsi, %rax
 ; movq %rbp, %rsp
 ; popq %rbp
@@ -330,7 +330,7 @@ block0(v0: i64, v1: i32):
 ; pushq %rbp
 ; movq %rsp, %rbp
 ; block0:
-; andl %esi, $31, %esi
+; andl $0x1f, %esi
 ; bzhi 20(%rdi), %esi, %eax
 ; movq %rbp, %rsp
 ; popq %rbp
diff --git a/cranelift/filetests/filetests/isa/x64/cmp-mem-bug.clif b/cranelift/filetests/filetests/isa/x64/cmp-mem-bug.clif
index 2574ddfb3de5..2ff5bbfa7b41 100644
--- a/cranelift/filetests/filetests/isa/x64/cmp-mem-bug.clif
+++ b/cranelift/filetests/filetests/isa/x64/cmp-mem-bug.clif
@@ -58,7 +58,7 @@ block0(v0: f64, v1: i64):
 ; ucomisd %xmm1, %xmm0
 ; setnp %dil
 ; setz %al
-; andl %edi, %eax, %edi
+; andb %al, %dil
 ; movzbq %dil, %rax
 ; ucomisd %xmm1, %xmm0
 ; movdqa %xmm0, %xmm2
@@ -77,13 +77,13 @@ block0(v0: f64, v1: i64):
 ; ucomisd %xmm1, %xmm0
 ; setnp %dil
 ; sete %al
-; andl %eax, %edi
+; andb %al, %dil
 ; movzbq %dil, %rax
 ; ucomisd %xmm1, %xmm0
 ; movdqa %xmm0, %xmm2
-; jnp 0x2c
+; jnp 0x2d
 ; movsd %xmm2, %xmm0
-; je 0x36
+; je 0x37
 ; movsd %xmm2, %xmm0
 ; movq %rbp, %rsp
 ; popq %rbp
diff --git a/cranelift/filetests/filetests/isa/x64/i128.clif b/cranelift/filetests/filetests/isa/x64/i128.clif
index 46fef1b38232..ae1549931760 100644
--- a/cranelift/filetests/filetests/isa/x64/i128.clif
+++ b/cranelift/filetests/filetests/isa/x64/i128.clif
@@ -76,9 +76,9 @@ block0(v0: i128, v1: i128):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rdi, %rax
-; andq %rax, %rdx, %rax
+; andq %rdx, %rax
 ; movq %rsi, %rdx
-; andq %rdx, %rcx, %rdx
+; andq %rcx, %rdx
 ; movq %rbp, %rsp
 ; popq %rbp
 ; ret
@@ -367,15 +367,15 @@ block0(v0: i128, v1: i128):
 ; cmpq %rdx, %rdi
 ; sbbq %rsi, %rcx, %rsi
 ; setnb %dil
-; andl %eax, %r9d, %eax
-; andl %r8d, %r11d, %r8d
-; andl %r10d, %r14d, %r10d
-; andl %r13d, %ebx, %r13d
-; andl %r15d, %edi, %r15d
-; andl %eax, %r8d, %eax
-; andl %r10d, %r13d, %r10d
-; andl %eax, %r10d, %eax
-; andl %eax, %r15d, %eax
+; andb %r9b, %al
+; andb %r11b, %r8b
+; andb %r14b, %r10b
+; andb %bl, %r13b
+; andb %dil, %r15b
+; andb %r8b, %al
+; andb %r13b, %r10b
+; andb %r10b, %al
+; andb %r15b, %al
 ; movq 0(%rsp), %rbx
 ; movq 8(%rsp), %r12
 ; movq 16(%rsp), %r13
@@ -440,15 +440,15 @@ block0(v0: i128, v1: i128):
 ; cmpq %rdx, %rdi
 ; sbbq %rcx, %rsi
 ; setae %dil
-; andl %r9d, %eax
-; andl %r11d, %r8d
-; andl %r14d, %r10d
-; andl %ebx, %r13d
-; andl %edi, %r15d
-; andl %r8d, %eax
-; andl %r13d, %r10d
-; andl %r10d, %eax
-; andl %r15d, %eax
+; andb %r9b, %al
+; andb %r11b, %r8b
+; andb %r14b, %r10b
+; andb %bl, %r13b
+; andb %dil, %r15b
+; andb %r8b, %al
+; andb %r13b, %r10b
+; andb %r10b, %al
+; andb %r15b, %al
 ; movq (%rsp), %rbx
 ; movq 8(%rsp), %r12
 ; movq 0x10(%rsp), %r13
@@ -757,38 +757,38 @@ block0(v0: i128):
 ; movq %rdi, %rax
 ; shrq $1, %rax, %rax
 ; movabsq $8608480567731124087, %r8
-; andq %rax, %r8, %rax
+; andq %r8, %rax
 ; subq %rdi, %rax, %rdi
 ; shrq $1, %rax, %rax
-; andq %rax, %r8, %rax
+; andq %r8, %rax
 ; subq %rdi, %rax, %rdi
 ; shrq $1, %rax, %rax
-; andq %rax, %r8, %rax
+; andq %r8, %rax
 ; subq %rdi, %rax, %rdi
 ; movq %rdi, %rax
 ; shrq $4, %rax, %rax
 ; addq %rax, %rdi, %rax
 ; movabsq $1085102592571150095, %rdi
-; andq %rax, %rdi, %rax
+; andq %rdi, %rax
 ; movabsq $72340172838076673, %rdx
 ; imulq %rax, %rdx, %rax
 ; shrq $56, %rax, %rax
 ; movq %rsi, %rdi
 ; shrq $1, %rdi, %rdi
 ; movabsq $8608480567731124087, %rcx
-; andq %rdi, %rcx, %rdi
+; andq %rcx, %rdi
 ; subq %rsi, %rdi, %rsi
 ; shrq $1, %rdi, %rdi
-; andq %rdi, %rcx, %rdi
+; andq %rcx, %rdi
 ; subq %rsi, %rdi, %rsi
 ; shrq $1, %rdi, %rdi
-; andq %rdi, %rcx, %rdi
+; andq %rcx, %rdi
 ; subq %rsi, %rdi, %rsi
 ; movq %rsi, %rdi
 ; shrq $4, %rdi, %rdi
 ; addq %rdi, %rsi, %rdi
 ; movabsq $1085102592571150095, %r10
-; andq %rdi, %r10, %rdi
+; andq %r10, %rdi
 ; movabsq $72340172838076673, %rcx
 ; imulq %rdi, %rcx, %rdi
 ; shrq $56, %rdi, %rdi
@@ -859,83 +859,83 @@ block0(v0: i128):
 ; block0:
 ; movabsq $6148914691236517205, %rcx
 ; movq %rsi, %rdx
-; andq %rdx, %rcx, %rdx
+; andq %rcx, %rdx
 ; shrq $1, %rsi, %rsi
-; andq %rsi, %rcx, %rsi
+; andq %rcx, %rsi
 ; shlq $1, %rdx, %rdx
 ; orq %rdx, %rsi, %rdx
 ; movabsq $3689348814741910323, %r9
 ; movq %rdx, %r10
-; andq %r10, %r9, %r10
+; andq %r9, %r10
 ; shrq $2, %rdx, %rdx
-; andq %rdx, %r9, %rdx
+; andq %r9, %rdx
 ; shlq $2, %r10, %r10
 ; orq %r10, %rdx, %r10
 ; movabsq $1085102592571150095, %rsi
 ; movq %r10, %rax
-; andq %rax, %rsi, %rax
+; andq %rsi, %rax
 ; shrq $4, %r10, %r10
-; andq %r10, %rsi, %r10
+; andq %rsi, %r10
 ; shlq $4, %rax, %rax
 ; orq %rax, %r10, %rax
 ; movabsq $71777214294589695, %rcx
 ; movq %rax, %rdx
-; andq %rdx, %rcx, %rdx
+; andq %rcx, %rdx
 ; shrq $8, %rax, %rax
-; andq %rax, %rcx, %rax
+; andq %rcx, %rax
 ; shlq $8, %rdx, %rdx
 ; orq %rdx, %rax, %rdx
 ; movabsq $281470681808895, %r10
 ; movq %rdx, %r9
-; andq %r9, %r10, %r9
+; andq %r10, %r9
 ; shrq $16, %rdx, %rdx
-; andq %rdx, %r10, %rdx
+; andq %r10, %rdx
 ; shlq $16, %r9, %r9
 ; orq %r9, %rdx, %r9
 ; movabsq $4294967295, %rsi
 ; movq %r9, %rax
-; andq %rax, %rsi, %rax
+; andq %rsi, %rax
 ; shrq $32, %r9, %r9
 ; shlq $32, %rax, %rax
 ; orq %rax, %r9, %rax
 ; movabsq $6148914691236517205, %rdx
 ; movq %rdi, %rcx
-; andq %rcx, %rdx, %rcx
+; andq %rdx, %rcx
 ; shrq $1, %rdi, %rdi
-; andq %rdi, %rdx, %rdi
+; andq %rdx, %rdi
 ; shlq $1, %rcx, %rcx
 ; orq %rcx, %rdi, %rcx
 ; movabsq $3689348814741910323, %rdx
 ; movq %rcx, %r8
-; andq %r8, %rdx, %r8
+; andq %rdx, %r8
 ; shrq $2, %rcx, %rcx
-; andq %rcx, %rdx, %rcx
+; andq %rdx, %rcx
 ; shlq $2, %r8, %r8
 ; orq %r8, %rcx, %r8
 ; movabsq $1085102592571150095, %r10
 ; movq %r8, %r11
-; andq %r11, %r10, %r11
+; andq %r10, %r11
 ; shrq $4, %r8, %r8
-; andq %r8, %r10, %r8
+; andq %r10, %r8
 ; shlq $4, %r11, %r11
 ; orq %r11, %r8, %r11
 ; movabsq $71777214294589695, %rdi
 ; movq %r11, %rcx
-; andq %rcx, %rdi, %rcx
+; andq %rdi, %rcx
 ; shrq $8, %r11, %r11
-; andq %r11, %rdi, %r11
+; andq %rdi, %r11
 ; shlq $8, %rcx, %rcx
 ; orq %rcx, %r11, %rcx
 ; movabsq $281470681808895, %rdx
 ; movq %rcx, %r8
-; andq %r8, %rdx, %r8
+; andq %rdx, %r8
 ; shrq $16, %rcx, %rcx
-; andq %rcx, %rdx, %rcx
+; andq %rdx, %rcx
 ; shlq $16, %r8, %r8
 ; orq %r8, %rcx, %r8
 ; movabsq $4294967295, %r10
 ; movq %r8, %rdx
-; andq %rdx, %r10, %rdx
+; andq %r10, %rdx
 ; shrq $32, %r8, %r8
 ; shlq $32, %rdx, %rdx
 ; orq %rdx, %r8, %rdx
@@ -1431,7 +1431,7 @@ block0(v0: i8, v1: i128):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $7, %rcx
+; andq $7, %rcx
 ; movq %rdi, %rax
 ; shlb %cl, %al, %al
 ; movq %rbp, %rsp
diff --git a/cranelift/filetests/filetests/isa/x64/immediates.clif b/cranelift/filetests/filetests/isa/x64/immediates.clif
index 3db06044dfd9..2b75700181ab 100644
--- a/cranelift/filetests/filetests/isa/x64/immediates.clif
+++ b/cranelift/filetests/filetests/isa/x64/immediates.clif
@@ -26,7 +26,7 @@ block0(v0: i64, v1: i64):
 ; subq %r11, const(0), %r11
 ; movq %r11, 0(%rsi)
 ; movq %rdi, %rax
-; andq %rax, const(0), %rax
+; andq (%rip), %rax
 ; movq %rax, 0(%rsi)
 ; orq %rdi, const(0), %rdi
 ; movq %rdi, 0(%rsi)
diff --git a/cranelift/filetests/filetests/isa/x64/ishl.clif b/cranelift/filetests/filetests/isa/x64/ishl.clif
index 32ab2f796270..ea9feb9e2993 100644
--- a/cranelift/filetests/filetests/isa/x64/ishl.clif
+++ b/cranelift/filetests/filetests/isa/x64/ishl.clif
@@ -360,7 +360,7 @@ block0(v0: i16, v1: i128):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $15, %rcx
+; andq $0xf, %rcx
 ; movq %rdi, %rax
 ; shlw %cl, %ax, %ax
 ; movq %rbp, %rsp
@@ -391,7 +391,7 @@ block0(v0: i8, v1: i128):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $7, %rcx
+; andq $7, %rcx
 ; movq %rdi, %rax
 ; shlb %cl, %al, %al
 ; movq %rbp, %rsp
@@ -654,7 +654,7 @@ block0(v0: i16, v1: i64):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $15, %rcx
+; andq $0xf, %rcx
 ; movq %rdi, %rax
 ; shlw %cl, %ax, %ax
 ; movq %rbp, %rsp
@@ -685,7 +685,7 @@ block0(v0: i16, v1: i32):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $15, %rcx
+; andq $0xf, %rcx
 ; movq %rdi, %rax
 ; shlw %cl, %ax, %ax
 ; movq %rbp, %rsp
@@ -716,7 +716,7 @@ block0(v0: i16, v1: i16):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $15, %rcx
+; andq $0xf, %rcx
 ; movq %rdi, %rax
 ; shlw %cl, %ax, %ax
 ; movq %rbp, %rsp
@@ -747,7 +747,7 @@ block0(v0: i16, v1: i8):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $15, %rcx
+; andq $0xf, %rcx
 ; movq %rdi, %rax
 ; shlw %cl, %ax, %ax
 ; movq %rbp, %rsp
@@ -778,7 +778,7 @@ block0(v0: i8, v1: i64):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $7, %rcx
+; andq $7, %rcx
 ; movq %rdi, %rax
 ; shlb %cl, %al, %al
 ; movq %rbp, %rsp
@@ -809,7 +809,7 @@ block0(v0: i8, v1: i32):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $7, %rcx
+; andq $7, %rcx
 ; movq %rdi, %rax
 ; shlb %cl, %al, %al
 ; movq %rbp, %rsp
@@ -840,7 +840,7 @@ block0(v0: i8, v1: i16):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $7, %rcx
+; andq $7, %rcx
 ; movq %rdi, %rax
 ; shlb %cl, %al, %al
 ; movq %rbp, %rsp
@@ -871,7 +871,7 @@ block0(v0: i8, v1: i8):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $7, %rcx
+; andq $7, %rcx
 ; movq %rdi, %rax
 ; shlb %cl, %al, %al
 ; movq %rbp, %rsp
diff --git a/cranelift/filetests/filetests/isa/x64/popcnt.clif b/cranelift/filetests/filetests/isa/x64/popcnt.clif
index b74ba4b41622..9a0ec48592c5 100644
--- a/cranelift/filetests/filetests/isa/x64/popcnt.clif
+++ b/cranelift/filetests/filetests/isa/x64/popcnt.clif
@@ -14,19 +14,19 @@ block0(v0: i64):
 ; movq %rdi, %rax
 ; shrq $1, %rax, %rax
 ; movabsq $8608480567731124087, %rdx
-; andq %rax, %rdx, %rax
+; andq %rdx, %rax
 ; subq %rdi, %rax, %rdi
 ; shrq $1, %rax, %rax
-; andq %rax, %rdx, %rax
+; andq %rdx, %rax
 ; subq %rdi, %rax, %rdi
 ; shrq $1, %rax, %rax
-; andq %rax, %rdx, %rax
+; andq %rdx, %rax
 ; subq %rdi, %rax, %rdi
 ; movq %rdi, %rax
 ; shrq $4, %rax, %rax
 ; addq %rax, %rdi, %rax
 ; movabsq $1085102592571150095, %r11
-; andq %rax, %r11, %rax
+; andq %r11, %rax
 ; movabsq $72340172838076673, %rcx
 ; imulq %rax, %rcx, %rax
 ; shrq $56, %rax, %rax
@@ -77,19 +77,19 @@ block0(v0: i64):
 ; movq %rdx, %rcx
 ; shrq $1, %rcx, %rcx
 ; movabsq $8608480567731124087, %r8
-; andq %rcx, %r8, %rcx
+; andq %r8, %rcx
 ; subq %rdx, %rcx, %rdx
 ; shrq $1, %rcx, %rcx
-; andq %rcx, %r8, %rcx
+; andq %r8, %rcx
 ; subq %rdx, %rcx, %rdx
 ; shrq $1, %rcx, %rcx
-; andq %rcx, %r8, %rcx
+; andq %r8, %rcx
 ; subq %rdx, %rcx, %rdx
 ; movq %rdx, %rax
 ; shrq $4, %rax, %rax
 ; addq %rax, %rdx, %rax
 ; movabsq $1085102592571150095, %rsi
-; andq %rax, %rsi, %rax
+; andq %rsi, %rax
 ; movabsq $72340172838076673, %rdx
 ; imulq %rax, %rdx, %rax
 ; shrq $56, %rax, %rax
@@ -139,18 +139,18 @@ block0(v0: i32):
 ; movq %rdi, %rax
 ; shrl $1, %eax, %eax
 ; movl $2004318071, %edx
-; andl %eax, %edx, %eax
+; andl %edx, %eax
 ; subl %edi, %eax, %edi
 ; shrl $1, %eax, %eax
-; andl %eax, %edx, %eax
+; andl %edx, %eax
 ; subl %edi, %eax, %edi
 ; shrl $1, %eax, %eax
-; andl %eax, %edx, %eax
+; andl %edx, %eax
 ; subl %edi, %eax, %edi
 ; movq %rdi, %r9
 ; shrl $4, %r9d, %r9d
 ; addl %r9d, %edi, %r9d
-; andl %r9d, $252645135, %r9d
+; andl $0xf0f0f0f, %r9d
 ; imull %r9d, 0x1010101, %eax
 ; shrl $24, %eax, %eax
 ; movq %rbp, %rsp
@@ -198,18 +198,18 @@ block0(v0: i64):
 ; movq %rax, %rcx
 ; shrl $1, %ecx, %ecx
 ; movl $2004318071, %r8d
-; andl %ecx, %r8d, %ecx
+; andl %r8d, %ecx
 ; subl %eax, %ecx, %eax
 ; shrl $1, %ecx, %ecx
-; andl %ecx, %r8d, %ecx
+; andl %r8d, %ecx
 ; subl %eax, %ecx, %eax
 ; shrl $1, %ecx, %ecx
-; andl %ecx, %r8d, %ecx
+; andl %r8d, %ecx
 ; subl %eax, %ecx, %eax
 ; movq %rax, %r10
 ; shrl $4, %r10d, %r10d
 ; addl %r10d, %eax, %r10d
-; andl %r10d, $252645135, %r10d
+; andl $0xf0f0f0f, %r10d
 ; imull %r10d, 0x1010101, %eax
 ; shrl $24, %eax, %eax
 ; movq %rbp, %rsp
diff --git a/cranelift/filetests/filetests/isa/x64/simd-arith-avx.clif b/cranelift/filetests/filetests/isa/x64/simd-arith-avx.clif
index 12e3ce7c3f5d..e03bf86581f1 100644
--- a/cranelift/filetests/filetests/isa/x64/simd-arith-avx.clif
+++ b/cranelift/filetests/filetests/isa/x64/simd-arith-avx.clif
@@ -914,7 +914,7 @@ block0(v0: i8x16, v1: i32):
 ; pushq %rbp
 ; movq %rsp, %rbp
 ; block0:
-; andq %rdi, $7, %rdi
+; andq $7, %rdi
 ; vpunpcklbw %xmm0, %xmm0, %xmm5
 ; vpunpckhbw %xmm0, %xmm0, %xmm7
 ; addl %edi, $8, %edi
@@ -987,7 +987,7 @@ block0(v0: i16x8, v1: i32):
 ; pushq %rbp
 ; movq %rsp, %rbp
 ; block0:
-; andq %rdi, $15, %rdi
+; andq $0xf, %rdi
 ; vmovd %edi, %xmm5
 ; vpsraw %xmm0, %xmm5, %xmm0
 ; movq %rbp, %rsp
@@ -1042,7 +1042,7 @@ block0(v0: i32x4, v1: i32):
 ; pushq %rbp
 ; movq %rsp, %rbp
 ; block0:
-; andq %rdi, $31, %rdi
+; andq $0x1f, %rdi
 ; vmovd %edi, %xmm5
 ; vpsrad %xmm0, %xmm5, %xmm0
 ; movq %rbp, %rsp
@@ -1381,7 +1381,7 @@ block0(v0: i8x16, v1: i32):
 ; pushq %rbp
 ; movq %rsp, %rbp
 ; block0:
-; andq %rdi, $7, %rdi
+; andq $7, %rdi
 ; vmovd %edi, %xmm5
 ; vpsllw %xmm0, %xmm5, %xmm7
 ; lea const(0), %rsi
@@ -1400,7 +1400,7 @@ block0(v0: i8x16, v1: i32):
 ; andq $7, %rdi
 ; vmovd %edi, %xmm5
 ; vpsllw %xmm5, %xmm0, %xmm7
-; leaq 0x19(%rip), %rsi
+; leaq 0x16(%rip), %rsi
 ; shlq $4, %rdi
 ; vmovdqu (%rsi, %rdi), %xmm5
 ; vpand %xmm5, %xmm7, %xmm0
@@ -1409,8 +1409,6 @@ block0(v0: i8x16, v1: i32):
 ; retq
 ; addb %al, (%rax)
 ; addb %al, (%rax)
-; addb %al, (%rax)
-; addb %bh, %bh
 
 function %i8x16_shl_imm(i8x16) -> i8x16 {
 block0(v0: i8x16):
@@ -1455,7 +1453,7 @@ block0(v0: i16x8, v1: i32):
 ; pushq %rbp
 ; movq %rsp, %rbp
 ; block0:
-; andq %rdi, $15, %rdi
+; andq $0xf, %rdi
 ; vmovd %edi, %xmm5
 ; vpsllw %xmm0, %xmm5, %xmm0
 ; movq %rbp, %rsp
@@ -1510,7 +1508,7 @@ block0(v0: i32x4, v1: i32):
 ; pushq %rbp
 ; movq %rsp, %rbp
 ; block0:
-; andq %rdi, $31, %rdi
+; andq $0x1f, %rdi
 ; vmovd %edi, %xmm5
 ; vpslld %xmm0, %xmm5, %xmm0
 ; movq %rbp, %rsp
@@ -1565,7 +1563,7 @@ block0(v0: i64x2, v1: i32):
 ; pushq %rbp
 ; movq %rsp, %rbp
 ; block0:
-; andq %rdi, $63, %rdi
+; andq $0x3f, %rdi
 ; vmovd %edi, %xmm5
 ; vpsllq %xmm0, %xmm5, %xmm0
 ; movq %rbp, %rsp
@@ -1620,7 +1618,7 @@ block0(v0: i8x16, v1: i32):
 ; pushq %rbp
 ; movq %rsp, %rbp
 ; block0:
-; andq %rdi, $7, %rdi
+; andq $7, %rdi
 ; vmovd %edi, %xmm5
 ; vpsrlw %xmm0, %xmm5, %xmm7
 ; lea const(0), %rsi
@@ -1638,7 +1636,7 @@ block0(v0: i8x16, v1: i32):
 ; andq $7, %rdi
 ; vmovd %edi, %xmm5
 ; vpsrlw %xmm5, %xmm0, %xmm7
-; leaq 0x19(%rip), %rsi
+; leaq 0x16(%rip), %rsi
 ; shlq $4, %rdi
 ; vpand (%rsi, %rdi), %xmm7, %xmm0
 ; movq %rbp, %rsp
@@ -1648,8 +1646,6 @@ block0(v0: i8x16, v1: i32):
 ; addb %al, (%rax)
 ; addb %al, (%rax)
 ; addb %al, (%rax)
-; addb %al, (%rax)
-; addb %bh, %bh
 
 function %i8x16_ushr_imm(i8x16) -> i8x16 {
 block0(v0: i8x16):
@@ -1702,7 +1698,7 @@ block0(v0: i16x8, v1: i32):
 ; pushq %rbp
 ; movq %rsp, %rbp
 ; block0:
-; andq %rdi, $15, %rdi
+; andq $0xf, %rdi
 ; vmovd %edi, %xmm5
 ; vpsrlw %xmm0, %xmm5, %xmm0
 ; movq %rbp, %rsp
@@ -1757,7 +1753,7 @@ block0(v0: i32x4, v1: i32):
 ; pushq %rbp
 ; movq %rsp, %rbp
 ; block0:
-; andq %rdi, $31, %rdi
+; andq $0x1f, %rdi
 ; vmovd %edi, %xmm5
 ; vpsrld %xmm0, %xmm5, %xmm0
 ; movq %rbp, %rsp
@@ -1812,7 +1808,7 @@ block0(v0: i64x2, v1: i32):
 ; pushq %rbp
 ; movq %rsp, %rbp
 ; block0:
-; andq %rdi, $63, %rdi
+; andq $0x3f, %rdi
 ; vmovd %edi, %xmm5
 ; vpsrlq %xmm0, %xmm5, %xmm0
 ; movq %rbp, %rsp
diff --git a/cranelift/filetests/filetests/isa/x64/simd-bitwise-compile.clif b/cranelift/filetests/filetests/isa/x64/simd-bitwise-compile.clif
index c0ec635df5da..0b8328102747 100644
--- a/cranelift/filetests/filetests/isa/x64/simd-bitwise-compile.clif
+++ b/cranelift/filetests/filetests/isa/x64/simd-bitwise-compile.clif
@@ -325,7 +325,7 @@ block0(v0: i32):
 ; movq %rsp, %rbp
 ; block0:
 ; movdqu const(1), %xmm0
-; andq %rdi, $7, %rdi
+; andq $7, %rdi
 ; movd %edi, %xmm5
 ; psllw %xmm0, %xmm5, %xmm0
 ; lea const(0), %rsi
@@ -345,7 +345,7 @@ block0(v0: i32):
 ; andq $7, %rdi
 ; movd %edi, %xmm5
 ; psllw %xmm5, %xmm0
-; leaq 0x31(%rip), %rsi
+; leaq 0x2e(%rip), %rsi
 ; shlq $4, %rdi
 ; movdqu (%rsi, %rdi), %xmm5
 ; pand %xmm5, %xmm0
@@ -358,12 +358,9 @@ block0(v0: i32):
 ; addb %al, (%rax)
 ; addb %al, (%rax)
 ; addb %al, (%rax)
-; addb %al, (%rax)
-; addb %al, (%rax)
-; addl %eax, (%rdx)
-; addl 0x9080706(, %rax), %eax
-; orb (%rbx), %cl
-; orb $0xd, %al
+; addb %al, (%rcx)
+; addb (%rbx), %al
+; addb $5, %al
 
 function %ishl_i8x16_imm(i8x16) -> i8x16 {
 block0(v0: i8x16):
@@ -601,7 +598,7 @@ block0(v0: i32):
 ; movq %rsp, %rbp
 ; block0:
 ; movdqu const(0), %xmm1
-; andq %rdi, $7, %rdi
+; andq $7, %rdi
 ; movdqa %xmm1, %xmm0
 ; punpcklbw %xmm0, %xmm1, %xmm0
 ; punpckhbw %xmm1, %xmm1, %xmm1
@@ -637,10 +634,10 @@ block0(v0: i32):
 ; addb %al, (%rax)
 ; addb %al, (%rax)
 ; addb %al, (%rax)
-; addb %al, (%rax)
-; addb %al, (%rcx)
-; addb (%rbx), %al
-; addb $5, %al
+; addl %eax, (%rdx)
+; addl 0x9080706(, %rax), %eax
+; orb (%rbx), %cl
+; orb $0xd, %al
 
 function %sshr_i8x16_imm(i8x16, i32) -> i8x16 {
 block0(v0: i8x16, v1: i32):
@@ -885,7 +882,7 @@ block0(v0: i64x2, v1: i32):
 ; pushq %rbp
 ; movq %rsp, %rbp
 ; block0:
-; andq %rdi, $63, %rdi
+; andq $0x3f, %rdi
 ; movq %rdi, %xmm5
 ; movdqu const(0), %xmm1
 ; psrlq %xmm1, %xmm5, %xmm1
@@ -905,7 +902,7 @@ block0(v0: i64x2, v1: i32):
 ; block1: ; offset 0x4
 ; andq $0x3f, %rdi
 ; movq %rdi, %xmm5
-; movdqu 0x2b(%rip), %xmm1
+; movdqu 0x28(%rip), %xmm1
 ; psrlq %xmm5, %xmm1
 ; psrlq %xmm5, %xmm0
 ; movdqa %xmm0, %xmm7
@@ -924,7 +921,7 @@ block0(v0: i64x2, v1: i32):
 ; addb %al, (%rax)
 ; addb %al, (%rax)
 ; addb %al, (%rax)
-; addb %al, (%rax)
+; addb $0, (%rax)
 ; addb %al, (%rax)
 ; addb %al, (%rax)
 
diff --git a/cranelift/filetests/filetests/isa/x64/simd-i64x2-shift-avx512.clif b/cranelift/filetests/filetests/isa/x64/simd-i64x2-shift-avx512.clif
index d7a6796b9138..f12ce6bc9049 100644
--- a/cranelift/filetests/filetests/isa/x64/simd-i64x2-shift-avx512.clif
+++ b/cranelift/filetests/filetests/isa/x64/simd-i64x2-shift-avx512.clif
@@ -15,10 +15,10 @@ block0(v0: i64x2, v1: i64):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rdi, %r9
-; andq %r9, $63, %r9
+; andq $0x3f, %r9
 ; vmovd %r9d, %xmm1
 ; vpsraq %xmm1, %xmm0, %xmm0
-; andq %rdi, $63, %rdi
+; andq $0x3f, %rdi
 ; vmovd %edi, %xmm1
 ; vpsraq %xmm1, %xmm0, %xmm1
 ; movq %rbp, %rsp
diff --git a/cranelift/filetests/filetests/isa/x64/sshr.clif b/cranelift/filetests/filetests/isa/x64/sshr.clif
index b44ad0c35ed6..53d0d456ea17 100644
--- a/cranelift/filetests/filetests/isa/x64/sshr.clif
+++ b/cranelift/filetests/filetests/isa/x64/sshr.clif
@@ -399,7 +399,7 @@ block0(v0: i16, v1: i128):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $15, %rcx
+; andq $0xf, %rcx
 ; movq %rdi, %rax
 ; sarw %cl, %ax, %ax
 ; movq %rbp, %rsp
@@ -430,7 +430,7 @@ block0(v0: i8, v1: i128):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $7, %rcx
+; andq $7, %rcx
 ; movq %rdi, %rax
 ; sarb %cl, %al, %al
 ; movq %rbp, %rsp
@@ -693,7 +693,7 @@ block0(v0: i16, v1: i64):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $15, %rcx
+; andq $0xf, %rcx
 ; movq %rdi, %rax
 ; sarw %cl, %ax, %ax
 ; movq %rbp, %rsp
@@ -724,7 +724,7 @@ block0(v0: i16, v1: i32):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $15, %rcx
+; andq $0xf, %rcx
 ; movq %rdi, %rax
 ; sarw %cl, %ax, %ax
 ; movq %rbp, %rsp
@@ -755,7 +755,7 @@ block0(v0: i16, v1: i16):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $15, %rcx
+; andq $0xf, %rcx
 ; movq %rdi, %rax
 ; sarw %cl, %ax, %ax
 ; movq %rbp, %rsp
@@ -786,7 +786,7 @@ block0(v0: i16, v1: i8):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $15, %rcx
+; andq $0xf, %rcx
 ; movq %rdi, %rax
 ; sarw %cl, %ax, %ax
 ; movq %rbp, %rsp
@@ -817,7 +817,7 @@ block0(v0: i8, v1: i64):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $7, %rcx
+; andq $7, %rcx
 ; movq %rdi, %rax
 ; sarb %cl, %al, %al
 ; movq %rbp, %rsp
@@ -848,7 +848,7 @@ block0(v0: i8, v1: i32):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $7, %rcx
+; andq $7, %rcx
 ; movq %rdi, %rax
 ; sarb %cl, %al, %al
 ; movq %rbp, %rsp
@@ -879,7 +879,7 @@ block0(v0: i8, v1: i16):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $7, %rcx
+; andq $7, %rcx
 ; movq %rdi, %rax
 ; sarb %cl, %al, %al
 ; movq %rbp, %rsp
@@ -910,7 +910,7 @@ block0(v0: i8, v1: i8):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $7, %rcx
+; andq $7, %rcx
 ; movq %rdi, %rax
 ; sarb %cl, %al, %al
 ; movq %rbp, %rsp
diff --git a/cranelift/filetests/filetests/isa/x64/ushr.clif b/cranelift/filetests/filetests/isa/x64/ushr.clif
index 8c760f2d48f0..a26ba9868afa 100644
--- a/cranelift/filetests/filetests/isa/x64/ushr.clif
+++ b/cranelift/filetests/filetests/isa/x64/ushr.clif
@@ -369,7 +369,7 @@ block0(v0: i16, v1: i128):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $15, %rcx
+; andq $0xf, %rcx
 ; movq %rdi, %rax
 ; shrw %cl, %ax, %ax
 ; movq %rbp, %rsp
@@ -400,7 +400,7 @@ block0(v0: i8, v1: i128):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $7, %rcx
+; andq $7, %rcx
 ; movq %rdi, %rax
 ; shrb %cl, %al, %al
 ; movq %rbp, %rsp
@@ -663,7 +663,7 @@ block0(v0: i16, v1: i64):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $15, %rcx
+; andq $0xf, %rcx
 ; movq %rdi, %rax
 ; shrw %cl, %ax, %ax
 ; movq %rbp, %rsp
@@ -694,7 +694,7 @@ block0(v0: i16, v1: i32):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $15, %rcx
+; andq $0xf, %rcx
 ; movq %rdi, %rax
 ; shrw %cl, %ax, %ax
 ; movq %rbp, %rsp
@@ -725,7 +725,7 @@ block0(v0: i16, v1: i16):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $15, %rcx
+; andq $0xf, %rcx
 ; movq %rdi, %rax
 ; shrw %cl, %ax, %ax
 ; movq %rbp, %rsp
@@ -756,7 +756,7 @@ block0(v0: i16, v1: i8):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $15, %rcx
+; andq $0xf, %rcx
 ; movq %rdi, %rax
 ; shrw %cl, %ax, %ax
 ; movq %rbp, %rsp
@@ -787,7 +787,7 @@ block0(v0: i8, v1: i64):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $7, %rcx
+; andq $7, %rcx
 ; movq %rdi, %rax
 ; shrb %cl, %al, %al
 ; movq %rbp, %rsp
@@ -818,7 +818,7 @@ block0(v0: i8, v1: i32):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $7, %rcx
+; andq $7, %rcx
 ; movq %rdi, %rax
 ; shrb %cl, %al, %al
 ; movq %rbp, %rsp
@@ -849,7 +849,7 @@ block0(v0: i8, v1: i16):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $7, %rcx
+; andq $7, %rcx
 ; movq %rdi, %rax
 ; shrb %cl, %al, %al
 ; movq %rbp, %rsp
@@ -880,7 +880,7 @@ block0(v0: i8, v1: i8):
 ; movq %rsp, %rbp
 ; block0:
 ; movq %rsi, %rcx
-; andq %rcx, $7, %rcx
+; andq $7, %rcx
 ; movq %rdi, %rax
 ; shrb %cl, %al, %al
 ; movq %rbp, %rsp
diff --git a/cranelift/filetests/filetests/isa/x64/winch.clif b/cranelift/filetests/filetests/isa/x64/winch.clif
index 1e250b608958..2bda2fd16b7d 100644
--- a/cranelift/filetests/filetests/isa/x64/winch.clif
+++ b/cranelift/filetests/filetests/isa/x64/winch.clif
@@ -303,7 +303,7 @@ block0(v0:i64):
 ; call *%r10
 ; movq 4(%rsp), %rax
 ; movq 0(%rsp), %r9
-; andl %eax, %r9d, %eax
+; andl %r9d, %eax
 ; movq 16(%rsp), %rbx
 ; movq 24(%rsp), %r12
 ; movq 32(%rsp), %r13