From 9bf96350a14abea0ed9da70878927743c81ee966 Mon Sep 17 00:00:00 2001
From: Anzooooo
Date: Wed, 18 Dec 2024 13:53:23 +0800
Subject: [PATCH] area(LoadQueue): remove useless regs

The additional release logic for vector loads in the `RAR/RAW Queue` looks
unneeded; it only causes the `RAR/RAW Queue` to store redundant `regs` for
`uopidx`.
---
 src/main/scala/xiangshan/mem/lsqueue/LoadQueue.scala  |  2 --
 .../scala/xiangshan/mem/lsqueue/LoadQueueRAR.scala    | 10 +---------
 .../scala/xiangshan/mem/lsqueue/LoadQueueRAW.scala    | 10 +---------
 3 files changed, 2 insertions(+), 20 deletions(-)

diff --git a/src/main/scala/xiangshan/mem/lsqueue/LoadQueue.scala b/src/main/scala/xiangshan/mem/lsqueue/LoadQueue.scala
index c6012df65c..6556be4c5d 100644
--- a/src/main/scala/xiangshan/mem/lsqueue/LoadQueue.scala
+++ b/src/main/scala/xiangshan/mem/lsqueue/LoadQueue.scala
@@ -216,7 +216,6 @@ class LoadQueue(implicit p: Parameters) extends XSModule
    * LoadQueueRAR
    */
   loadQueueRAR.io.redirect <> io.redirect
-  loadQueueRAR.io.vecFeedback <> io.vecFeedback
   loadQueueRAR.io.release <> io.release
   loadQueueRAR.io.ldWbPtr <> virtualLoadQueue.io.ldWbPtr
   for (w <- 0 until LoadPipelineWidth) {
@@ -229,7 +228,6 @@ class LoadQueue(implicit p: Parameters) extends XSModule
    * LoadQueueRAW
    */
   loadQueueRAW.io.redirect <> io.redirect
-  loadQueueRAW.io.vecFeedback <> io.vecFeedback
   loadQueueRAW.io.storeIn <> io.sta.storeAddrIn
   loadQueueRAW.io.stAddrReadySqPtr <> io.sq.stAddrReadySqPtr
   loadQueueRAW.io.stIssuePtr <> io.sq.stIssuePtr
diff --git a/src/main/scala/xiangshan/mem/lsqueue/LoadQueueRAR.scala b/src/main/scala/xiangshan/mem/lsqueue/LoadQueueRAR.scala
index 8aa976ea58..e573c84585 100644
--- a/src/main/scala/xiangshan/mem/lsqueue/LoadQueueRAR.scala
+++ b/src/main/scala/xiangshan/mem/lsqueue/LoadQueueRAR.scala
@@ -34,7 +34,6 @@ class LoadQueueRAR(implicit p: Parameters) extends XSModule
   val io = IO(new Bundle() {
     // control
     val redirect = Flipped(Valid(new Redirect))
-    val vecFeedback = Vec(VecLoadPipelineWidth, Flipped(ValidIO(new FeedbackToLsqIO)))
 
     // violation query
     val query = Vec(LoadPipelineWidth, Flipped(new LoadNukeQueryIO))
@@ -192,18 +191,11 @@ class LoadQueueRAR(implicit p: Parameters) extends XSModule
 
   // when the loads that "older than" current load were writebacked,
   // current load will be released.
 
-  val vecLdCanceltmp = Wire(Vec(LoadQueueRARSize, Vec(VecLoadPipelineWidth, Bool())))
-  val vecLdCancel = Wire(Vec(LoadQueueRARSize, Bool()))
   for (i <- 0 until LoadQueueRARSize) {
     val deqNotBlock = !isBefore(io.ldWbPtr, uop(i).lqIdx)
     val needFlush = uop(i).robIdx.needFlush(io.redirect)
-    val fbk = io.vecFeedback
-    for (j <- 0 until VecLoadPipelineWidth) {
-      vecLdCanceltmp(i)(j) := allocated(i) && fbk(j).valid && fbk(j).bits.isFlush && uop(i).robIdx === fbk(j).bits.robidx && uop(i).uopIdx === fbk(j).bits.uopidx
-    }
-    vecLdCancel(i) := vecLdCanceltmp(i).reduce(_ || _)
-    when (allocated(i) && (deqNotBlock || needFlush || vecLdCancel(i))) {
+    when (allocated(i) && (deqNotBlock || needFlush)) {
       allocated(i) := false.B
       freeMaskVec(i) := true.B
     }
diff --git a/src/main/scala/xiangshan/mem/lsqueue/LoadQueueRAW.scala b/src/main/scala/xiangshan/mem/lsqueue/LoadQueueRAW.scala
index 59cbb5a095..717ae97242 100644
--- a/src/main/scala/xiangshan/mem/lsqueue/LoadQueueRAW.scala
+++ b/src/main/scala/xiangshan/mem/lsqueue/LoadQueueRAW.scala
@@ -37,7 +37,6 @@ class LoadQueueRAW(implicit p: Parameters) extends XSModule
   val io = IO(new Bundle() {
     // control
     val redirect = Flipped(ValidIO(new Redirect))
-    val vecFeedback = Vec(VecLoadPipelineWidth, Flipped(ValidIO(new FeedbackToLsqIO)))
 
     // violation query
     val query = Vec(LoadPipelineWidth, Flipped(new LoadNukeQueryIO))
@@ -178,18 +177,11 @@ class LoadQueueRAW(implicit p: Parameters) extends XSModule
 
   // when the stores that "older than" current load address were ready.
   // current load will be released.
 
-  val vecLdCanceltmp = Wire(Vec(LoadQueueRAWSize, Vec(VecLoadPipelineWidth, Bool())))
-  val vecLdCancel = Wire(Vec(LoadQueueRAWSize, Bool()))
   for (i <- 0 until LoadQueueRAWSize) {
     val deqNotBlock = Mux(!allAddrCheck, !isBefore(io.stAddrReadySqPtr, uop(i).sqIdx), true.B)
     val needCancel = uop(i).robIdx.needFlush(io.redirect)
-    val fbk = io.vecFeedback
-    for (j <- 0 until VecLoadPipelineWidth) {
-      vecLdCanceltmp(i)(j) := allocated(i) && fbk(j).valid && fbk(j).bits.isFlush && uop(i).robIdx === fbk(j).bits.robidx && uop(i).uopIdx === fbk(j).bits.uopidx
-    }
-    vecLdCancel(i) := vecLdCanceltmp(i).reduce(_ || _)
-    when (allocated(i) && (deqNotBlock || needCancel || vecLdCancel(i))) {
+    when (allocated(i) && (deqNotBlock || needCancel)) {
       allocated(i) := false.B
       freeMaskVec(i) := true.B
     }