diff --git a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
index 846768f6d631e..cf682d9129e13 100644
--- a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
+++ b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
@@ -17141,6 +17141,14 @@ bool RISCVTargetLowering::isDesirableToCommuteWithShift(
       return false;
     }
   }
+
+  // Don't break slli.uw patterns.
+  if (Subtarget.hasStdExtZba() && Ty.isScalarInteger() &&
+      N->getOpcode() == ISD::SHL && N0.getOpcode() == ISD::AND &&
+      isa<ConstantSDNode>(N0.getOperand(1)) &&
+      N0.getConstantOperandVal(1) == UINT64_C(0xffffffff))
+    return false;
+
   return true;
 }
 
diff --git a/llvm/test/CodeGen/RISCV/rv64zba.ll b/llvm/test/CodeGen/RISCV/rv64zba.ll
index 8fe221f2a297a..a0a7db538e835 100644
--- a/llvm/test/CodeGen/RISCV/rv64zba.ll
+++ b/llvm/test/CodeGen/RISCV/rv64zba.ll
@@ -2866,8 +2866,7 @@ define ptr @gep_lshr_i32(ptr %0, i64 %1) {
 ;
 ; RV64ZBA-LABEL: gep_lshr_i32:
 ; RV64ZBA:       # %bb.0: # %entry
-; RV64ZBA-NEXT:    slli a1, a1, 2
-; RV64ZBA-NEXT:    srli a1, a1, 4
+; RV64ZBA-NEXT:    srli a1, a1, 2
 ; RV64ZBA-NEXT:    slli.uw a1, a1, 4
 ; RV64ZBA-NEXT:    sh2add a1, a1, a1
 ; RV64ZBA-NEXT:    add a0, a0, a1
@@ -2891,8 +2890,7 @@ define i64 @srli_slliw(i64 %1) {
 ;
 ; RV64ZBA-LABEL: srli_slliw:
 ; RV64ZBA:       # %bb.0: # %entry
-; RV64ZBA-NEXT:    slli a0, a0, 2
-; RV64ZBA-NEXT:    srli a0, a0, 4
+; RV64ZBA-NEXT:    srli a0, a0, 2
 ; RV64ZBA-NEXT:    slli.uw a0, a0, 4
 ; RV64ZBA-NEXT:    ret
 entry: