From 600d77d67c37654f75b9ee618bbd5ed1bfeada8b Mon Sep 17 00:00:00 2001 From: Easton Man Date: Tue, 12 Nov 2024 20:50:59 +0800 Subject: [PATCH 1/7] feat(logging): use scala-logging for logging infra --- build.sc | 2 ++ src/main/resources/logback-test.xml | 10 ++++++++ src/main/scala/xiangshan/Parameters.scala | 3 ++- .../xiangshan/frontend/icache/ICache.scala | 24 +++++++++---------- 4 files changed, 26 insertions(+), 13 deletions(-) create mode 100644 src/main/resources/logback-test.xml diff --git a/build.sc b/build.sc index c72f52d4f91..6f584885a3e 100644 --- a/build.sc +++ b/build.sc @@ -259,6 +259,8 @@ object xiangshan extends XiangShanModule with HasChisel with ScalafmtModule { override def ivyDeps = super.ivyDeps() ++ Agg( defaultVersions("chiseltest"), + ivy"com.typesafe.scala-logging::scala-logging:3.9.5", + ivy"ch.qos.logback:logback-classic:1.5.12", ) override def scalacOptions = super.scalacOptions() ++ Agg("-deprecation", "-feature") diff --git a/src/main/resources/logback-test.xml b/src/main/resources/logback-test.xml new file mode 100644 index 00000000000..7a25e95b026 --- /dev/null +++ b/src/main/resources/logback-test.xml @@ -0,0 +1,10 @@ + + + + %gray(%d{yyyy-MM-dd HH:mm:ss}) %cyan(%logger{24}) %highlight([%-5level]) %msg%n + + + + + + diff --git a/src/main/scala/xiangshan/Parameters.scala b/src/main/scala/xiangshan/Parameters.scala index 7d5dc498111..acbfa50400f 100644 --- a/src/main/scala/xiangshan/Parameters.scala +++ b/src/main/scala/xiangshan/Parameters.scala @@ -46,6 +46,7 @@ import coupledL2._ import coupledL2.tl2chi._ import xiangshan.backend.datapath.WakeUpConfig import xiangshan.mem.prefetch.{PrefetcherParams, SMSParams} +import com.typesafe.scalalogging.LazyLogging import scala.math.{max, min, pow} @@ -580,7 +581,7 @@ case class DebugOptions EnableRollingDB: Boolean = false ) -trait HasXSParameter { +trait HasXSParameter extends LazyLogging { implicit val p: Parameters diff --git a/src/main/scala/xiangshan/frontend/icache/ICache.scala b/src/main/scala/xiangshan/frontend/icache/ICache.scala index 7520361f3e7..a0b1054fd0b 100644 --- a/src/main/scala/xiangshan/frontend/icache/ICache.scala +++ b/src/main/scala/xiangshan/frontend/icache/ICache.scala @@ -558,18 +558,18 @@ class ICache()(implicit p: Parameters) extends LazyModule with HasICacheParamete class ICacheImp(outer: ICache) extends LazyModuleImp(outer) with HasICacheParameters with HasPerfEvents { val io: ICacheIO = IO(new ICacheIO) - println("ICache:") - println(" TagECC: " + cacheParams.tagECC) - println(" DataECC: " + cacheParams.dataECC) - println(" ICacheSets: " + cacheParams.nSets) - println(" ICacheWays: " + cacheParams.nWays) - println(" PortNumber: " + cacheParams.PortNumber) - println(" nFetchMshr: " + cacheParams.nFetchMshr) - println(" nPrefetchMshr: " + cacheParams.nPrefetchMshr) - println(" nWayLookupSize: " + cacheParams.nWayLookupSize) - println(" DataCodeUnit: " + cacheParams.DataCodeUnit) - println(" ICacheDataBanks: " + cacheParams.ICacheDataBanks) - println(" ICacheDataSRAMWidth: " + cacheParams.ICacheDataSRAMWidth) + logger.info("ICache:") + logger.info(" TagECC: " + cacheParams.tagECC) + logger.info(" DataECC: " + cacheParams.dataECC) + logger.info(" ICacheSets: " + cacheParams.nSets) + logger.info(" ICacheWays: " + cacheParams.nWays) + logger.info(" PortNumber: " + cacheParams.PortNumber) + logger.info(" nFetchMshr: " + cacheParams.nFetchMshr) + logger.info(" nPrefetchMshr: " + cacheParams.nPrefetchMshr) + logger.info(" nWayLookupSize: " + cacheParams.nWayLookupSize) + logger.info(" 
DataCodeUnit: " + cacheParams.DataCodeUnit) + logger.info(" ICacheDataBanks: " + cacheParams.ICacheDataBanks) + logger.info(" ICacheDataSRAMWidth: " + cacheParams.ICacheDataSRAMWidth) val (bus, edge) = outer.clientNode.out.head From 9b7a19f42ec941c4ab56c5cbb4f1582d3f1fba36 Mon Sep 17 00:00:00 2001 From: Easton Man Date: Sat, 16 Nov 2024 20:29:05 +0800 Subject: [PATCH 2/7] feat(logging): subsitute more println for logger --- build.sc | 14 +++-- src/main/resources/logback-test.xml | 2 +- .../scala/xiangshan/backend/Backend.scala | 62 +++++++++---------- .../xiangshan/backend/BackendParams.scala | 13 ++-- .../xiangshan/backend/exu/ExeUnitParams.scala | 10 +-- .../backend/fu/NewCSR/MachineLevel.scala | 7 ++- .../xiangshan/backend/fu/NewCSR/NewCSR.scala | 4 +- .../xiangshan/backend/issue/Dispatch2Iq.scala | 2 +- .../backend/rename/CompressUnit.scala | 2 +- .../xiangshan/backend/rename/Rename.scala | 2 +- src/main/scala/xiangshan/frontend/BPU.scala | 2 +- .../scala/xiangshan/frontend/Composer.scala | 4 +- .../scala/xiangshan/frontend/Frontend.scala | 2 +- .../scala/xiangshan/frontend/ITTAGE.scala | 2 +- .../scala/xiangshan/frontend/NewFtq.scala | 2 +- src/main/scala/xiangshan/frontend/Tage.scala | 2 +- 16 files changed, 71 insertions(+), 61 deletions(-) diff --git a/build.sc b/build.sc index 6f584885a3e..4100e8b6b69 100644 --- a/build.sc +++ b/build.sc @@ -63,6 +63,14 @@ trait HasChisel extends SbtModule { override def scalacPluginIvyDeps = super.scalacPluginIvyDeps() ++ Agg(chiselPluginIvy.get) } +trait HasLogging extends SbtModule { + override def ivyDeps: T[Agg[Dep]] = super.ivyDeps() ++ Agg( + ivy"com.typesafe.scala-logging::scala-logging:3.9.5", + ivy"ch.qos.logback:logback-classic:1.5.12", + ivy"org.fusesource.jansi:jansi:1.17", + ) +} + object rocketchip extends $file.`rocket-chip`.common.RocketChipModule with HasChisel { @@ -109,7 +117,7 @@ object rocketchip } } -object utility extends HasChisel { +object utility extends HasChisel with HasLogging { override def millSourcePath = pwd / "utility" @@ -233,7 +241,7 @@ trait XiangShanModule extends ScalaModule { override def forkEnv = Map("PATH" -> envPATH) } -object xiangshan extends XiangShanModule with HasChisel with ScalafmtModule { +object xiangshan extends XiangShanModule with HasChisel with HasLogging with ScalafmtModule { override def millSourcePath = pwd @@ -259,8 +267,6 @@ object xiangshan extends XiangShanModule with HasChisel with ScalafmtModule { override def ivyDeps = super.ivyDeps() ++ Agg( defaultVersions("chiseltest"), - ivy"com.typesafe.scala-logging::scala-logging:3.9.5", - ivy"ch.qos.logback:logback-classic:1.5.12", ) override def scalacOptions = super.scalacOptions() ++ Agg("-deprecation", "-feature") diff --git a/src/main/resources/logback-test.xml b/src/main/resources/logback-test.xml index 7a25e95b026..aee4d2c0f1e 100644 --- a/src/main/resources/logback-test.xml +++ b/src/main/resources/logback-test.xml @@ -1,7 +1,7 @@ - %gray(%d{yyyy-MM-dd HH:mm:ss}) %cyan(%logger{24}) %highlight([%-5level]) %msg%n + %gray(%d{yyyy-MM-dd HH:mm:ss}) %highlight(%-5level) %cyan(%-24logger{24}) | %msg%n diff --git a/src/main/scala/xiangshan/backend/Backend.scala b/src/main/scala/xiangshan/backend/Backend.scala index 483b4c42774..9a2ee506639 100644 --- a/src/main/scala/xiangshan/backend/Backend.scala +++ b/src/main/scala/xiangshan/backend/Backend.scala @@ -83,7 +83,7 @@ class BackendInlined(val params: BackendParams)(implicit p: Parameters) extends ibp.updateIdx(idx) } - println(params.iqWakeUpParams) + 
logger.debug(s"${params.iqWakeUpParams}") for ((schdCfg, i) <- params.allSchdParams.zipWithIndex) { schdCfg.bindBackendParam(params) @@ -99,14 +99,14 @@ class BackendInlined(val params: BackendParams)(implicit p: Parameters) extends exuCfg.updateExuIdx(i) } - println("[Backend] ExuConfigs:") + logger.debug("ExuConfigs:") for (exuCfg <- params.allExuParams) { val fuConfigs = exuCfg.fuConfigs val wbPortConfigs = exuCfg.wbPortConfigs val immType = exuCfg.immType - println("[Backend] " + - s"${exuCfg.name}: " + + logger.debug( + s" ${exuCfg.name}: " + (if (exuCfg.fakeUnit) "fake, " else "") + (if (exuCfg.hasLoadFu || exuCfg.hasHyldaFu) s"LdExuIdx(${backendParams.getLdExuIdx(exuCfg)})" else "") + s"${fuConfigs.map(_.name).mkString("fu(s): {", ",", "}")}, " + @@ -132,47 +132,47 @@ class BackendInlined(val params: BackendParams)(implicit p: Parameters) extends ) } - println(s"[Backend] all fu configs") + logger.trace(s"all fu configs") for (cfg <- FuConfig.allConfigs) { - println(s"[Backend] $cfg") + logger.trace(s" $cfg") } - println(s"[Backend] Int RdConfigs: ExuName(Priority)") + logger.trace(s"Int RdConfigs: ExuName(Priority)") for ((port, seq) <- params.getRdPortParams(IntData())) { - println(s"[Backend] port($port): ${seq.map(x => params.getExuName(x._1) + "(" + x._2.toString + ")").mkString(",")}") + logger.trace(s" port($port): ${seq.map(x => params.getExuName(x._1) + "(" + x._2.toString + ")").mkString(",")}") } - println(s"[Backend] Int WbConfigs: ExuName(Priority)") + logger.trace(s"Int WbConfigs: ExuName(Priority)") for ((port, seq) <- params.getWbPortParams(IntData())) { - println(s"[Backend] port($port): ${seq.map(x => params.getExuName(x._1) + "(" + x._2.toString + ")").mkString(",")}") + logger.trace(s" port($port): ${seq.map(x => params.getExuName(x._1) + "(" + x._2.toString + ")").mkString(",")}") } - println(s"[Backend] Fp RdConfigs: ExuName(Priority)") + logger.trace(s"Fp RdConfigs: ExuName(Priority)") for ((port, seq) <- params.getRdPortParams(FpData())) { - println(s"[Backend] port($port): ${seq.map(x => params.getExuName(x._1) + "(" + x._2.toString + ")").mkString(",")}") + logger.trace(s" port($port): ${seq.map(x => params.getExuName(x._1) + "(" + x._2.toString + ")").mkString(",")}") } - println(s"[Backend] Fp WbConfigs: ExuName(Priority)") + logger.trace(s"Fp WbConfigs: ExuName(Priority)") for ((port, seq) <- params.getWbPortParams(FpData())) { - println(s"[Backend] port($port): ${seq.map(x => params.getExuName(x._1) + "(" + x._2.toString + ")").mkString(",")}") + logger.trace(s" port($port): ${seq.map(x => params.getExuName(x._1) + "(" + x._2.toString + ")").mkString(",")}") } - println(s"[Backend] Vf RdConfigs: ExuName(Priority)") + logger.trace(s"Vf RdConfigs: ExuName(Priority)") for ((port, seq) <- params.getRdPortParams(VecData())) { - println(s"[Backend] port($port): ${seq.map(x => params.getExuName(x._1) + "(" + x._2.toString + ")").mkString(",")}") + logger.trace(s" port($port): ${seq.map(x => params.getExuName(x._1) + "(" + x._2.toString + ")").mkString(",")}") } - println(s"[Backend] Vf WbConfigs: ExuName(Priority)") + logger.trace(s"Vf WbConfigs: ExuName(Priority)") for ((port, seq) <- params.getWbPortParams(VecData())) { - println(s"[Backend] port($port): ${seq.map(x => params.getExuName(x._1) + "(" + x._2.toString + ")").mkString(",")}") + logger.trace(s" port($port): ${seq.map(x => params.getExuName(x._1) + "(" + x._2.toString + ")").mkString(",")}") } - println(s"[Backend] Dispatch Configs:") - println(s"[Backend] Load IQ enq width(${params.numLoadDp}), 
Store IQ enq width(${params.numStoreDp})") - println(s"[Backend] Load DP width(${LSQLdEnqWidth}), Store DP width(${LSQStEnqWidth})") + logger.info(s"Dispatch Configs:") + logger.info(s" Load IQ enq width(${params.numLoadDp}), Store IQ enq width(${params.numStoreDp})") + logger.info(s" Load DP width(${LSQLdEnqWidth}), Store DP width(${LSQStEnqWidth})") params.updateCopyPdestInfo - println(s"[Backend] copyPdestInfo ${params.copyPdestInfo}") + logger.trace(s"copyPdestInfo ${params.copyPdestInfo}") params.allExuParams.map(_.copyNum) val ctrlBlock = LazyModule(new CtrlBlock(params)) val pcTargetMem = LazyModule(new PcTargetMem(params)) @@ -220,7 +220,7 @@ class BackendInlinedImp(override val wrapper: BackendInlined)(implicit p: Parame memScheduler.io.toSchedulers.wakeupVec ).map(x => (x.bits.exuIdx, x)).toMap - println(s"[Backend] iq wake up keys: ${iqWakeUpMappedBundle.keys}") + logger.trace(s"iq wake up keys: ${iqWakeUpMappedBundle.keys}") wbFuBusyTable.io.in.intSchdBusyTable := intScheduler.io.wbFuBusyTable wbFuBusyTable.io.in.fpSchdBusyTable := fpScheduler.io.wbFuBusyTable @@ -376,8 +376,8 @@ class BackendInlinedImp(override val wrapper: BackendInlined)(implicit p: Parame dataPath.io.ldCancel := io.mem.ldCancel - println(s"[Backend] wbDataPath.io.toIntPreg: ${wbDataPath.io.toIntPreg.size}, dataPath.io.fromIntWb: ${dataPath.io.fromIntWb.size}") - println(s"[Backend] wbDataPath.io.toVfPreg: ${wbDataPath.io.toVfPreg.size}, dataPath.io.fromFpWb: ${dataPath.io.fromVfWb.size}") + logger.trace(s"wbDataPath.io.toIntPreg: ${wbDataPath.io.toIntPreg.size}, dataPath.io.fromIntWb: ${dataPath.io.fromIntWb.size}") + logger.trace(s"wbDataPath.io.toVfPreg: ${wbDataPath.io.toVfPreg.size}, dataPath.io.fromFpWb: ${dataPath.io.fromVfWb.size}") dataPath.io.fromIntWb := wbDataPath.io.toIntPreg dataPath.io.fromFpWb := wbDataPath.io.toFpPreg dataPath.io.fromVfWb := wbDataPath.io.toVfPreg @@ -401,9 +401,9 @@ class BackendInlinedImp(override val wrapper: BackendInlined)(implicit p: Parame } og2ForVector.io.fromOg1ImmInfo := dataPath.io.og1ImmInfo.zip(params.allExuParams).filter(_._2.needOg2).map(_._1) - println(s"[Backend] BypassNetwork OG1 Mem Size: ${bypassNetwork.io.fromDataPath.mem.zip(params.memSchdParams.get.issueBlockParams).filterNot(_._2.needOg2Resp).size}") - println(s"[Backend] BypassNetwork OG2 Mem Size: ${bypassNetwork.io.fromDataPath.mem.zip(params.memSchdParams.get.issueBlockParams).filter(_._2.needOg2Resp).size}") - println(s"[Backend] bypassNetwork.io.fromDataPath.mem: ${bypassNetwork.io.fromDataPath.mem.size}, dataPath.io.toMemExu: ${dataPath.io.toMemExu.size}") + logger.trace(s"BypassNetwork OG1 Mem Size: ${bypassNetwork.io.fromDataPath.mem.zip(params.memSchdParams.get.issueBlockParams).filterNot(_._2.needOg2Resp).size}") + logger.trace(s"BypassNetwork OG2 Mem Size: ${bypassNetwork.io.fromDataPath.mem.zip(params.memSchdParams.get.issueBlockParams).filter(_._2.needOg2Resp).size}") + logger.trace(s"bypassNetwork.io.fromDataPath.mem: ${bypassNetwork.io.fromDataPath.mem.size}, dataPath.io.toMemExu: ${dataPath.io.toMemExu.size}") bypassNetwork.io.fromDataPath.int <> dataPath.io.toIntExu bypassNetwork.io.fromDataPath.fp <> dataPath.io.toFpExu bypassNetwork.io.fromDataPath.vf <> og2ForVector.io.toVfArithExu @@ -598,8 +598,8 @@ class BackendInlinedImp(override val wrapper: BackendInlined)(implicit p: Parame private val memIssueParams = params.memSchdParams.get.issueBlockParams private val memExuBlocksHasLDU = memIssueParams.map(_.exuBlockParams.map(x => x.hasLoadFu || x.hasHyldaFu)) private val 
memExuBlocksHasVecLoad = memIssueParams.map(_.exuBlockParams.map(x => x.hasVLoadFu)) - println(s"[Backend] memExuBlocksHasLDU: $memExuBlocksHasLDU") - println(s"[Backend] memExuBlocksHasVecLoad: $memExuBlocksHasVecLoad") + logger.debug(s"memExuBlocksHasLDU: $memExuBlocksHasLDU") + logger.debug(s"memExuBlocksHasVecLoad: $memExuBlocksHasVecLoad") private val toMem = Wire(bypassNetwork.io.toExus.mem.cloneType) for (i <- toMem.indices) { @@ -812,7 +812,7 @@ class BackendInlinedImp(override val wrapper: BackendInlined)(implicit p: Parame if (printEventCoding) { for (((name, inc), i) <- allPerfEvents.zipWithIndex) { - println("backend perfEvents Set", name, inc, i) + logger.trace(f"Backend perfEvents Set, ${name}, ${inc}, ${i}") } } diff --git a/src/main/scala/xiangshan/backend/BackendParams.scala b/src/main/scala/xiangshan/backend/BackendParams.scala index e274bc4bcda..ec35a842864 100644 --- a/src/main/scala/xiangshan/backend/BackendParams.scala +++ b/src/main/scala/xiangshan/backend/BackendParams.scala @@ -19,6 +19,7 @@ package xiangshan.backend import org.chipsalliance.cde.config.Parameters import chisel3._ import chisel3.util._ +import com.typesafe.scalalogging.LazyLogging import xiangshan.backend.Bundles._ import xiangshan.backend.datapath.DataConfig._ import xiangshan.backend.datapath.RdConfig._ @@ -36,7 +37,7 @@ case class BackendParams( schdParams : Map[SchedulerType, SchdBlockParams], pregParams : Seq[PregParams], iqWakeUpParams : Seq[WakeUpConfig], -) { +) extends LazyLogging { def debugEn(implicit p: Parameters): Boolean = p(DebugOptionsKey).EnableDifftest @@ -51,13 +52,13 @@ case class BackendParams( copyPdestInfo.contains(exuIdx) } def connectWakeup(exuIdx: Int): Unit = { - println(s"[Backend] copyPdestInfo ${copyPdestInfo}") + logger.trace(s"[Backend] copyPdestInfo ${copyPdestInfo}") if (copyPdestInfo.contains(exuIdx)) { - println(s"[Backend] exuIdx ${exuIdx} be connected, old info ${copyPdestInfo(exuIdx)}") + logger.trace(s"[Backend] exuIdx ${exuIdx} be connected, old info ${copyPdestInfo(exuIdx)}") val newInfo = exuIdx -> (copyPdestInfo(exuIdx)._1, copyPdestInfo(exuIdx)._2 + 1) copyPdestInfo.remove(exuIdx) copyPdestInfo += newInfo - println(s"[Backend] exuIdx ${exuIdx} be connected, new info ${copyPdestInfo(exuIdx)}") + logger.trace(s"[Backend] exuIdx ${exuIdx} be connected, new info ${copyPdestInfo(exuIdx)}") } } def getCopyPdestIndex(exuIdx: Int): Int = { @@ -436,7 +437,7 @@ case class BackendParams( val rdTypes = Seq(IntRD(), FpRD(), VfRD()) for(wbType <- wbTypes){ for(rdType <- rdTypes){ - println(s"[BackendParams] wbType: ${wbType}, rdType: ${rdType}") + logger.trace(s"[BackendParams] wbType: ${wbType}, rdType: ${rdType}") allRealExuParams.map { case exuParam => val wbPortConfigs = exuParam.wbPortConfigs @@ -458,7 +459,7 @@ case class BackendParams( .sortBy(_._1.get.priority) .groupBy(_._1.get.port).map { case (wbPort, intWbRdPairs) => val rdCfgs = intWbRdPairs.map(_._2).flatten - println(s"[BackendParams] wb port ${wbPort} rdcfgs: ${rdCfgs}") + logger.trace(s"[BackendParams] wb port ${wbPort} rdcfgs: ${rdCfgs}") rdCfgs.groupBy(_.port).foreach { case (p, rdCfg) => //println(s"[BackendParams] rdport: ${p}, cfgs: ${rdCfg}") rdCfg.zip(rdCfg.drop(1)).foreach { case (cfg0, cfg1) => assert(cfg0.priority <= cfg1.priority, s"an exu has high priority at ${wbType} wb port ${wbPort}, but has low priority at ${rdType} rd port ${p}") } diff --git a/src/main/scala/xiangshan/backend/exu/ExeUnitParams.scala b/src/main/scala/xiangshan/backend/exu/ExeUnitParams.scala index 
46e05805259..65203c30b59 100644 --- a/src/main/scala/xiangshan/backend/exu/ExeUnitParams.scala +++ b/src/main/scala/xiangshan/backend/exu/ExeUnitParams.scala @@ -3,6 +3,7 @@ package xiangshan.backend.exu import org.chipsalliance.cde.config.Parameters import chisel3._ import chisel3.util._ +import com.typesafe.scalalogging.LazyLogging import xiangshan.backend.BackendParams import xiangshan.backend.Bundles.{ExuBypassBundle, ExuInput, ExuOutput} import xiangshan.backend.datapath.DataConfig.DataConfig @@ -10,7 +11,8 @@ import xiangshan.backend.datapath.RdConfig._ import xiangshan.backend.datapath.WbConfig._ import xiangshan.backend.datapath.{DataConfig, WakeUpConfig} import xiangshan.backend.fu.{FuConfig, FuType} -import xiangshan.backend.issue.{IssueBlockParams, SchedulerType, IntScheduler, VfScheduler, MemScheduler} +import xiangshan.backend.issue.{IntScheduler, IssueBlockParams, MemScheduler, SchedulerType, VfScheduler} + import scala.collection.mutable case class ExeUnitParams( @@ -24,7 +26,7 @@ case class ExeUnitParams( )( implicit val schdType: SchedulerType, -) { +) extends LazyLogging { // calculated configs var iqWakeUpSourcePairs: Seq[WakeUpConfig] = Seq() var iqWakeUpSinkPairs: Seq[WakeUpConfig] = Seq() @@ -127,7 +129,7 @@ case class ExeUnitParams( } } } - println(s"[Backend] exuIdx ${exuIdx} numWakeupIQ ${setIQ.size}") + logger.trace(s"[Backend] exuIdx ${exuIdx} numWakeupIQ ${setIQ.size}") 1 + setIQ.size / copyDistance } def rdPregIdxWidth: Int = { @@ -326,7 +328,7 @@ case class ExeUnitParams( val wakeUpByLoadNames = loadWakeUpSourcePairs.map(_.sink.name).toSet val thisWakeUpByNames = iqWakeUpSinkPairs.map(_.source.name).toSet this.needLoadDependency = !(wakeUpByLoadNames & thisWakeUpByNames).isEmpty - println(s"${this.name}: needLoadDependency is ${this.needLoadDependency}") + logger.trace(s"${this.name}: needLoadDependency is ${this.needLoadDependency}") } def updateExuIdx(idx: Int): Unit = { diff --git a/src/main/scala/xiangshan/backend/fu/NewCSR/MachineLevel.scala b/src/main/scala/xiangshan/backend/fu/NewCSR/MachineLevel.scala index 53a547fa17d..4a4bc6e0b3c 100644 --- a/src/main/scala/xiangshan/backend/fu/NewCSR/MachineLevel.scala +++ b/src/main/scala/xiangshan/backend/fu/NewCSR/MachineLevel.scala @@ -3,9 +3,10 @@ package xiangshan.backend.fu.NewCSR import chisel3._ import chisel3.experimental.SourceInfo import chisel3.util._ +import com.typesafe.scalalogging.LazyLogging import org.chipsalliance.cde.config.Parameters import freechips.rocketchip.rocket.CSRs -import utility.{SignExt, PerfEvent} +import utility.{PerfEvent, SignExt} import xiangshan.backend.fu.NewCSR.CSRBundles._ import xiangshan.backend.fu.NewCSR.CSRDefines._ import xiangshan.backend.fu.NewCSR.CSRDefines.{CSRROField => RO, CSRRWField => RW, _} @@ -18,14 +19,14 @@ import xiangshan.backend.fu.NewCSR.CSRFunc._ import scala.collection.immutable.SeqMap -trait MachineLevel { self: NewCSR => +trait MachineLevel extends LazyLogging { self: NewCSR => val mstatus = Module(new MstatusModule) .setAddr(CSRs.mstatus) val misa = Module(new CSRModule("Misa", new MisaBundle)) .setAddr(CSRs.misa) - println(s"[CSR] supported isa ext: ${misa.bundle.getISAString}") + logger.info(s"[CSR] supported isa ext: ${misa.bundle.getISAString}") val medeleg = Module(new CSRModule("Medeleg", new MedelegBundle)) .setAddr(CSRs.medeleg) diff --git a/src/main/scala/xiangshan/backend/fu/NewCSR/NewCSR.scala b/src/main/scala/xiangshan/backend/fu/NewCSR/NewCSR.scala index 5fb5192ae7e..9e2e68db5a0 100644 --- 
a/src/main/scala/xiangshan/backend/fu/NewCSR/NewCSR.scala +++ b/src/main/scala/xiangshan/backend/fu/NewCSR/NewCSR.scala @@ -716,8 +716,8 @@ class NewCSR(implicit val p: Parameters) extends Module } csrMods.foreach { mod => - println(s"${mod.modName}: ") - println(mod.dumpFields) + logger.trace(s"${mod.modName}: ") + logger.trace(mod.dumpFields) } trapEntryMNEvent.valid := ((hasTrap && nmi) || dbltrpToMN) && !entryDebugMode && !debugMode && mnstatus.regOut.NMIE diff --git a/src/main/scala/xiangshan/backend/issue/Dispatch2Iq.scala b/src/main/scala/xiangshan/backend/issue/Dispatch2Iq.scala index 5c22f0fb4fd..d5d00bd86ce 100644 --- a/src/main/scala/xiangshan/backend/issue/Dispatch2Iq.scala +++ b/src/main/scala/xiangshan/backend/issue/Dispatch2Iq.scala @@ -47,7 +47,7 @@ class Dispatch2Iq(val schdBlockParams : SchdBlockParams)(implicit p: Parameters) x => x.numVlSrc ).max).max - println(s"[Dispatch2Iq] numRegSrc: ${numRegSrc}, numRegSrcInt: ${numRegSrcInt}, numRegSrcFp: ${numRegSrcFp}, " + + logger.debug(s"numRegSrc: ${numRegSrc}, numRegSrcInt: ${numRegSrcInt}, numRegSrcFp: ${numRegSrcFp}, " + s"numRegSrcVf: ${numRegSrcVf}, numRegSrcV0: ${numRegSrcV0}, numRegSrcVl: ${numRegSrcVl}") val numIntStateRead = schdBlockParams.schdType match { diff --git a/src/main/scala/xiangshan/backend/rename/CompressUnit.scala b/src/main/scala/xiangshan/backend/rename/CompressUnit.scala index c60a5ea78dd..a7e39ef5735 100644 --- a/src/main/scala/xiangshan/backend/rename/CompressUnit.scala +++ b/src/main/scala/xiangshan/backend/rename/CompressUnit.scala @@ -68,7 +68,7 @@ class CompressUnit(implicit p: Parameters) extends XSModule{ else Seq.fill(RenameWidth)(0).patch(idx - cntL(idx), Seq.fill(size)(1), size) } - println("[Rename.Compress]" + + logger.trace( " i: " + keyCandidate + " key: " + key.tail.dropRight(1) + " needRobs: " + needRobs + diff --git a/src/main/scala/xiangshan/backend/rename/Rename.scala b/src/main/scala/xiangshan/backend/rename/Rename.scala index f79f04b7bb7..edece2f3d03 100644 --- a/src/main/scala/xiangshan/backend/rename/Rename.scala +++ b/src/main/scala/xiangshan/backend/rename/Rename.scala @@ -44,7 +44,7 @@ class Rename(implicit p: Parameters) extends XSModule with HasCircularQueuePtrHe private val numVecRegSrc = backendParams.numVecRegSrc private val numVecRatPorts = numVecRegSrc - println(s"[Rename] numRegSrc: $numRegSrc") + logger.info(s"numRegSrc: $numRegSrc") val io = IO(new Bundle() { val redirect = Flipped(ValidIO(new Redirect)) diff --git a/src/main/scala/xiangshan/frontend/BPU.scala b/src/main/scala/xiangshan/frontend/BPU.scala index 3a364c8b5a8..721e4b835bb 100644 --- a/src/main/scala/xiangshan/frontend/BPU.scala +++ b/src/main/scala/xiangshan/frontend/BPU.scala @@ -329,7 +329,7 @@ class Predictor(implicit p: Parameters) extends XSModule with HasBPUConst with H val s0_ghist = WireInit(0.U.asTypeOf(UInt(HistoryLength.W))) - println(f"history buffer length ${HistoryLength}") + logger.info(f"history buffer length ${HistoryLength}") val ghv_write_datas = Wire(Vec(HistoryLength, Bool())) val ghv_wens = Wire(Vec(HistoryLength, Bool())) diff --git a/src/main/scala/xiangshan/frontend/Composer.scala b/src/main/scala/xiangshan/frontend/Composer.scala index ed21c0b604c..cea20c84895 100644 --- a/src/main/scala/xiangshan/frontend/Composer.scala +++ b/src/main/scala/xiangshan/frontend/Composer.scala @@ -28,7 +28,7 @@ class Composer(implicit p: Parameters) extends BasePredictor with HasBPUConst wi require(all_fast_pred.length <= 1) if (all_fast_pred.length == 1) { val fast_pred = all_fast_pred(0) - 
println("[composer] bypassing output of fast pred: " + fast_pred.name) + logger.info("bypassing output of fast pred: " + fast_pred.name) io.out.s1 := fast_pred.io.out.s1 } @@ -59,7 +59,7 @@ class Composer(implicit p: Parameters) extends BasePredictor with HasBPUConst wi } meta_sz = meta_sz + c.meta_size } - println(s"total meta size: $meta_sz\n\n") + logger.info(s"total meta size: $meta_sz") io.in.ready := components.map(_.io.s1_ready).reduce(_ && _) diff --git a/src/main/scala/xiangshan/frontend/Frontend.scala b/src/main/scala/xiangshan/frontend/Frontend.scala index 5957649b5e8..bcd43d6d8e8 100644 --- a/src/main/scala/xiangshan/frontend/Frontend.scala +++ b/src/main/scala/xiangshan/frontend/Frontend.scala @@ -400,7 +400,7 @@ class FrontendInlinedImp(outer: FrontendInlined) extends LazyModuleImp(outer) if (printEventCoding) { for (((name, inc), i) <- allPerfEvents.zipWithIndex) { - println("Frontend perfEvents Set", name, inc, i) + logger.trace(f"Frontend perfEvents Set, ${name}, ${inc}, ${i}") } } diff --git a/src/main/scala/xiangshan/frontend/ITTAGE.scala b/src/main/scala/xiangshan/frontend/ITTAGE.scala index b038b3be2a3..58e4fe16aec 100644 --- a/src/main/scala/xiangshan/frontend/ITTAGE.scala +++ b/src/main/scala/xiangshan/frontend/ITTAGE.scala @@ -148,7 +148,7 @@ class ITTageTable( val foldedWidth = if (nRows >= SRAM_SIZE) nRows / SRAM_SIZE else 1 if (nRows < SRAM_SIZE) { - println(f"warning: ittage table $tableIdx has small sram depth of $nRows") + logger.warn(f"ittage table $tableIdx has small sram depth of $nRows") } // override val debug = true diff --git a/src/main/scala/xiangshan/frontend/NewFtq.scala b/src/main/scala/xiangshan/frontend/NewFtq.scala index c140a51ae1c..a79bc4455c0 100644 --- a/src/main/scala/xiangshan/frontend/NewFtq.scala +++ b/src/main/scala/xiangshan/frontend/NewFtq.scala @@ -633,7 +633,7 @@ class Ftq(implicit p: Parameters) extends XSModule with HasCircularQueuePtrHelpe ftq_redirect_mem.io.wen(0) := io.fromBpu.resp.bits.lastStage.valid(3) ftq_redirect_mem.io.waddr(0) := io.fromBpu.resp.bits.lastStage.ftq_idx.value ftq_redirect_mem.io.wdata(0) := io.fromBpu.resp.bits.last_stage_spec_info - println(f"ftq redirect MEM: entry ${ftq_redirect_mem.io.wdata(0).getWidth} * ${FtqSize} * 3") + logger.info(f"ftq redirect MEM: ${FtqSize} x ${ftq_redirect_mem.io.wdata(0).getWidth}") val ftq_meta_1r_sram = Module(new FtqNRSRAM(new Ftq_1R_SRAMEntry, 1)) // these info is intended to enq at the last stage of bpu diff --git a/src/main/scala/xiangshan/frontend/Tage.scala b/src/main/scala/xiangshan/frontend/Tage.scala index 7456c219a48..d0cf5b37dc5 100644 --- a/src/main/scala/xiangshan/frontend/Tage.scala +++ b/src/main/scala/xiangshan/frontend/Tage.scala @@ -295,7 +295,7 @@ class TageTable( val bankFoldWidth = if (bankSize >= bankSRAMSize) bankSize / bankSRAMSize else 1 val uFoldedWidth = nRowsPerBr / uSRAMSize if (bankSize < bankSRAMSize) { - println(f"warning: tage table $tableIdx has small sram depth of $bankSize") + logger.warn(f"tage table $tableIdx has small sram depth of $bankSize") } val bankIdxWidth = log2Ceil(nBanks) def get_bank_mask(idx: UInt) = VecInit((0 until nBanks).map(idx(bankIdxWidth - 1, 0) === _.U)) From e24c12b749f6f3066afc497a517e648f8cc8b410 Mon Sep 17 00:00:00 2001 From: Easton Man Date: Fri, 22 Nov 2024 17:34:51 +0800 Subject: [PATCH 3/7] feat(logging): subsitute more --- build.sc | 5 ++- .../{logback-test.xml => logback.xml} | 4 +- .../xiangshan/backend/issue/Dispatch2Iq.scala | 40 +++++++++---------- .../xiangshan/backend/issue/IssueQueue.scala | 10 
++--- .../xiangshan/backend/issue/Scheduler.scala | 34 ++++++++-------- 5 files changed, 47 insertions(+), 46 deletions(-) rename src/main/resources/{logback-test.xml => logback.xml} (81%) diff --git a/build.sc b/build.sc index 4100e8b6b69..f08189caae4 100644 --- a/build.sc +++ b/build.sc @@ -263,7 +263,7 @@ object xiangshan extends XiangShanModule with HasChisel with HasLogging with Sca def macrosModule = macros - override def forkArgs = Seq("-Xmx40G", "-Xss256m") + override def forkArgs = Seq("-Xmx40G", "-Xss256m", "-Dlogback.configurationFile=src/main/resources/logback.xml") override def ivyDeps = super.ivyDeps() ++ Agg( defaultVersions("chiseltest"), @@ -334,7 +334,8 @@ object xiangshan extends XiangShanModule with HasChisel with HasLogging with Sca } object test extends SbtTests with TestModule.ScalaTest { - override def forkArgs = Seq("-Xmx40G", "-Xss256m") + override def forkArgs = Seq("-Xmx40G", "-Xss256m", + "-Dlogback.configurationFile=src/main/resources/logback.xml", "-XX:+UseShenandoahGC") override def ivyDeps = super.ivyDeps() ++ Agg( defaultVersions("chiseltest") diff --git a/src/main/resources/logback-test.xml b/src/main/resources/logback.xml similarity index 81% rename from src/main/resources/logback-test.xml rename to src/main/resources/logback.xml index aee4d2c0f1e..d361853bbc4 100644 --- a/src/main/resources/logback-test.xml +++ b/src/main/resources/logback.xml @@ -1,10 +1,10 @@ - + %gray(%d{yyyy-MM-dd HH:mm:ss}) %highlight(%-5level) %cyan(%-24logger{24}) | %msg%n - + diff --git a/src/main/scala/xiangshan/backend/issue/Dispatch2Iq.scala b/src/main/scala/xiangshan/backend/issue/Dispatch2Iq.scala index d5d00bd86ce..d076f2cb12f 100644 --- a/src/main/scala/xiangshan/backend/issue/Dispatch2Iq.scala +++ b/src/main/scala/xiangshan/backend/issue/Dispatch2Iq.scala @@ -683,17 +683,17 @@ class Dispatch2IqArithImp(override val wrapper: Dispatch2Iq)(implicit p: Paramet with HasXSParameter { val portFuSets = params.issueBlockParams.map(_.exuBlockParams.flatMap(_.fuConfigs).map(_.fuType).toSet) - println(s"[Dispatch2IqArithImp] portFuSets: $portFuSets") + logger.debug(s"[Dispatch2IqArithImp] portFuSets: $portFuSets") val fuDeqMap = getFuDeqMap(portFuSets) - println(s"[Dispatch2IqArithImp] fuDeqMap: $fuDeqMap") + logger.debug(s"[Dispatch2IqArithImp] fuDeqMap: $fuDeqMap") val mergedFuDeqMap = mergeFuDeqMap(fuDeqMap) - println(s"[Dispatch2IqArithImp] mergedFuDeqMap: $mergedFuDeqMap") + logger.debug(s"[Dispatch2IqArithImp] mergedFuDeqMap: $mergedFuDeqMap") val expendedFuDeqMap = expendFuDeqMap(mergedFuDeqMap, params.issueBlockParams.map(_.numEnq)) - println(s"[Dispatch2IqArithImp] expendedFuDeqMap: $expendedFuDeqMap") + logger.debug(s"[Dispatch2IqArithImp] expendedFuDeqMap: $expendedFuDeqMap") // sort by count of port. Port less, priority higher. 
val finalFuDeqMap = expendedFuDeqMap.toSeq.sortBy(_._2.length) - println(s"[Dispatch2IqArithImp] finalFuDeqMap: $finalFuDeqMap") + logger.debug(s"[Dispatch2IqArithImp] finalFuDeqMap: $finalFuDeqMap") val outReadyMatrix = Wire(Vec(outs.size, Vec(numInPorts, Bool()))) outReadyMatrix.foreach(_.foreach(_ := false.B)) @@ -731,9 +731,9 @@ class Dispatch2IqArithImp(override val wrapper: Dispatch2Iq)(implicit p: Paramet } val portSelIdxOH: Map[Seq[Int], Vec[ValidIO[UInt]]] = finalFuDeqMap.zip(selIdxOH).map{ case ((fuTypeSeq, deqPortIdSeq), selIdxOHSeq) => (deqPortIdSeq, selIdxOHSeq)}.toMap - println(s"[Dispatch2IQ] portSelIdxOH: $portSelIdxOH") + logger.trace(s"[Dispatch2IQ] portSelIdxOH: $portSelIdxOH") val finalportSelIdxOH: mutable.Map[Int, Seq[ValidIO[UInt]]] = expendPortSel(portSelIdxOH) - println(s"[Dispatch2IQ] finalportSelIdxOH: $finalportSelIdxOH") + logger.trace(s"[Dispatch2IQ] finalportSelIdxOH: $finalportSelIdxOH") finalportSelIdxOH.foreach{ case (portId, selSeq) => val finalSelIdxOH: UInt = PriorityMux(selSeq.map(_.valid).toSeq, selSeq.map(_.bits).toSeq) outs(portId).valid := selSeq.map(_.valid).reduce(_ | _) @@ -967,17 +967,17 @@ class Dispatch2IqMemImp(override val wrapper: Dispatch2Iq)(implicit p: Parameter } val portFuSets = params.issueBlockParams.map(_.exuBlockParams.filterNot(_.hasStdFu).flatMap(_.fuConfigs).map(_.fuType).toSet) - println(s"[Dispatch2IqMemImp] portFuSets: $portFuSets") + logger.debug(s"[Dispatch2IqMemImp] portFuSets: $portFuSets") val fuDeqMap = getFuDeqMap(portFuSets) - println(s"[Dispatch2IqMemImp] fuDeqMap: $fuDeqMap") + logger.debug(s"[Dispatch2IqMemImp] fuDeqMap: $fuDeqMap") val mergedFuDeqMap = mergeFuDeqMap(fuDeqMap) - println(s"[Dispatch2IqMemImp] mergedFuDeqMap: $mergedFuDeqMap") + logger.debug(s"[Dispatch2IqMemImp] mergedFuDeqMap: $mergedFuDeqMap") val expendedFuDeqMap = expendFuDeqMap(mergedFuDeqMap, params.issueBlockParams.map(_.numEnq)) - println(s"[Dispatch2IqMemImp] expendedFuDeqMap: $expendedFuDeqMap") + logger.debug(s"[Dispatch2IqMemImp] expendedFuDeqMap: $expendedFuDeqMap") // sort by count of port. Port less, priority higher. 
val finalFuDeqMap = expendedFuDeqMap.toSeq.sortBy(_._2.length) - println(s"[Dispatch2IqMemImp] finalFuDeqMap: $finalFuDeqMap") + logger.debug(s"[Dispatch2IqMemImp] finalFuDeqMap: $finalFuDeqMap") val selIdxOH = Wire(MixedVec(finalFuDeqMap.map(x => Vec(x._2.size, ValidIO(UInt(uopsIn.size.W)))))) selIdxOH.foreach(_.foreach(_ := 0.U.asTypeOf(ValidIO(UInt(uopsIn.size.W))))) @@ -1019,10 +1019,10 @@ class Dispatch2IqMemImp(override val wrapper: Dispatch2Iq)(implicit p: Parameter require(loadMoreHyuDeq.sorted == expendedFuDeqMap(Seq(ldu)).sorted) require(loadLessHyuDeq.sorted == expendedFuDeqMap(Seq(ldu)).sorted) - println(storeDeq) - println(storeDeq.sorted) - println(expendedStuDeq) - println(expendedStuDeq.sorted) + logger.trace(storeDeq.toString()) + logger.trace(storeDeq.sorted.toString()) + logger.trace(expendedStuDeq.toString()) + logger.trace(expendedStuDeq.sorted.toString()) require(storeDeq.sorted == expendedStuDeq.sorted) // Seq(storeCnt)(priority) @@ -1114,7 +1114,7 @@ class Dispatch2IqMemImp(override val wrapper: Dispatch2Iq)(implicit p: Parameter finalFuDeqMap.zipWithIndex.foreach { case ((Seq(FuType.ldu), deqPortIdSeq), i) => - println(s"[Dispatch2IqMemImp] deqPort $deqPortIdSeq use ldu policy") + logger.trace(s"[Dispatch2IqMemImp] deqPort $deqPortIdSeq use ldu policy") val maxSelNum = wrapper.numIn val selNum = deqPortIdSeq.length val portReadyVec = loadReadyDecoder.map(Mux1H(_, deqPortIdSeq.map(outs(_).ready).toSeq)) @@ -1130,7 +1130,7 @@ class Dispatch2IqMemImp(override val wrapper: Dispatch2Iq)(implicit p: Parameter } } case ((fuTypeSeq, deqPortIdSeq), i) if fuTypeSeq.contains(FuType.stu) => - println(s"[Dispatch2IqMemImp] deqPort $deqPortIdSeq use stu policy") + logger.trace(s"[Dispatch2IqMemImp] deqPort $deqPortIdSeq use stu policy") val maxSelNum = wrapper.numIn val selNum = deqPortIdSeq.length val portReadyVec = storeReadyDecoder.map(Mux1H(_, deqPortIdSeq.map(outs(_).ready).toSeq)) @@ -1163,9 +1163,9 @@ class Dispatch2IqMemImp(override val wrapper: Dispatch2Iq)(implicit p: Parameter } val portSelIdxOH: Map[Seq[Int], Vec[ValidIO[UInt]]] = finalFuDeqMap.zip(selIdxOH).map { case ((fuTypeSeq, deqPortIdSeq), selIdxOHSeq) => (deqPortIdSeq, selIdxOHSeq) }.toMap - println(s"[Dispatch2IQ] portSelIdxOH: $portSelIdxOH") + logger.trace(s"[Dispatch2IQ] portSelIdxOH: $portSelIdxOH") val deqSelIdxOHSeq: mutable.Map[Int, Seq[ValidIO[UInt]]] = expendPortSel(portSelIdxOH) - println(s"[Dispatch2IQ] finalportSelIdxOH: $deqSelIdxOHSeq") + logger.trace(s"[Dispatch2IQ] finalportSelIdxOH: $deqSelIdxOHSeq") // Todo: split this matrix into more deq parts // deqSelIdxVec(deqIdx)(enqIdx): enqIdx uop can be accepted by deqIdx diff --git a/src/main/scala/xiangshan/backend/issue/IssueQueue.scala b/src/main/scala/xiangshan/backend/issue/IssueQueue.scala index 6458d547357..3d04b7a0740 100644 --- a/src/main/scala/xiangshan/backend/issue/IssueQueue.scala +++ b/src/main/scala/xiangshan/backend/issue/IssueQueue.scala @@ -82,7 +82,7 @@ class IssueQueueImp(override val wrapper: IssueQueue)(implicit p: Parameters, va override def desiredName: String = s"${params.getIQName}" - println(s"[IssueQueueImp] ${params.getIQName} wakeupFromWB(${io.wakeupFromWB.size}), " + + logger.debug(s"${params.getIQName} wakeupFromWB(${io.wakeupFromWB.size}), " + s"wakeup exu in(${params.wakeUpInExuSources.size}): ${params.wakeUpInExuSources.map(_.name).mkString("{",",","}")}, " + s"wakeup exu out(${params.wakeUpOutExuSources.size}): ${params.wakeUpOutExuSources.map(_.name).mkString("{",",","}")}, " + s"numEntries: 
${params.numEntries}, numRegSrc: ${params.numRegSrc}, " + @@ -100,8 +100,8 @@ class IssueQueueImp(override val wrapper: IssueQueue)(implicit p: Parameters, va val commonFuCfgs : Seq[FuConfig] = fuCfgsCnt.filter(_._2 > 1).keys.toSeq val wakeupFuLatencyMaps : Seq[Map[FuType.OHType, Int]] = params.exuBlockParams.map(x => x.wakeUpFuLatencyMap) - println(s"[IssueQueueImp] ${params.getIQName} fuLatencyMaps: ${wakeupFuLatencyMaps}") - println(s"[IssueQueueImp] ${params.getIQName} commonFuCfgs: ${commonFuCfgs.map(_.name)}") + logger.trace(s"${params.getIQName} fuLatencyMaps: ${wakeupFuLatencyMaps}") + logger.trace(s"${params.getIQName} commonFuCfgs: ${commonFuCfgs.map(_.name)}") lazy val io = IO(new IssueQueueIO()) // Modules @@ -1070,7 +1070,7 @@ class IssueQueueMemAddrImp(override val wrapper: IssueQueue)(implicit p: Paramet require(params.StdCnt == 0 && (params.LduCnt + params.StaCnt + params.HyuCnt) > 0, "IssueQueueMemAddrImp can only be instance of MemAddr IQ, " + s"StdCnt: ${params.StdCnt}, LduCnt: ${params.LduCnt}, StaCnt: ${params.StaCnt}, HyuCnt: ${params.HyuCnt}") - println(s"[IssueQueueMemAddrImp] StdCnt: ${params.StdCnt}, LduCnt: ${params.LduCnt}, StaCnt: ${params.StaCnt}, HyuCnt: ${params.HyuCnt}") + logger.trace(s"[IssueQueueMemAddrImp] StdCnt: ${params.StdCnt}, LduCnt: ${params.LduCnt}, StaCnt: ${params.StaCnt}, HyuCnt: ${params.HyuCnt}") io.suggestName("none") override lazy val io = IO(new IssueQueueMemIO).suggestName("io") @@ -1143,7 +1143,7 @@ class IssueQueueVecMemImp(override val wrapper: IssueQueue)(implicit p: Paramete extends IssueQueueImp(wrapper) with HasCircularQueuePtrHelper { require((params.VlduCnt + params.VstuCnt) > 0, "IssueQueueVecMemImp can only be instance of VecMem IQ") - println(s"[IssueQueueVecMemImp] VlduCnt: ${params.VlduCnt}, VstuCnt: ${params.VstuCnt}") + logger.trace(s"[IssueQueueVecMemImp] VlduCnt: ${params.VlduCnt}, VstuCnt: ${params.VstuCnt}") io.suggestName("none") override lazy val io = IO(new IssueQueueMemIO).suggestName("io") diff --git a/src/main/scala/xiangshan/backend/issue/Scheduler.scala b/src/main/scala/xiangshan/backend/issue/Scheduler.scala index a94453f4cbf..36b4819ea3f 100644 --- a/src/main/scala/xiangshan/backend/issue/Scheduler.scala +++ b/src/main/scala/xiangshan/backend/issue/Scheduler.scala @@ -362,11 +362,11 @@ abstract class SchedulerImpBase(wrapper: Scheduler)(implicit params: SchdBlockPa iq.io.wakeupFromIQ.foreach { wakeUp => val wakeUpIn = iqWakeUpInMap(wakeUp.bits.exuIdx) val exuIdx = wakeUp.bits.exuIdx - println(s"[Backend] Connect wakeup exuIdx ${exuIdx}") + logger.debug(s"[Backend] Connect wakeup exuIdx ${exuIdx}") connectSamePort(wakeUp,wakeUpIn) backendParams.connectWakeup(exuIdx) if (backendParams.isCopyPdest(exuIdx)) { - println(s"[Backend] exuIdx ${exuIdx} use pdestCopy ${backendParams.getCopyPdestIndex(exuIdx)}") + logger.trace(s"[Backend] exuIdx ${exuIdx} use pdestCopy ${backendParams.getCopyPdestIndex(exuIdx)}") wakeUp.bits.pdest := wakeUpIn.bits.pdestCopy.get(backendParams.getCopyPdestIndex(exuIdx)) if (wakeUpIn.bits.rfWenCopy.nonEmpty) wakeUp.bits.rfWen := wakeUpIn.bits.rfWenCopy.get(backendParams.getCopyPdestIndex(exuIdx)) if (wakeUpIn.bits.fpWenCopy.nonEmpty) wakeUp.bits.fpWen := wakeUpIn.bits.fpWenCopy.get(backendParams.getCopyPdestIndex(exuIdx)) @@ -460,8 +460,8 @@ abstract class SchedulerImpBase(wrapper: Scheduler)(implicit params: SchdBlockPa iq := in } - println(s"[Scheduler] numWriteRegCache: ${params.numWriteRegCache}") - println(s"[Scheduler] iqReplaceRCIdxVec: ${iqReplaceRCIdxVec.size}") + 
logger.info(s"numWriteRegCache: ${params.numWriteRegCache}") + logger.debug(s"iqReplaceRCIdxVec: ${iqReplaceRCIdxVec.size}") } // perfEvent @@ -475,11 +475,11 @@ abstract class SchedulerImpBase(wrapper: Scheduler)(implicit params: SchdBlockPa ("issueQueue_enq_fire_cnt", PopCount(lastCycleIqEnqFireVec) ) ) ++ issueQueueFullVecPerf - println(s"[Scheduler] io.fromSchedulers.wakeupVec: ${io.fromSchedulers.wakeupVec.map(x => backendParams.getExuName(x.bits.exuIdx))}") - println(s"[Scheduler] iqWakeUpInKeys: ${iqWakeUpInMap.keys}") + logger.debug(s"[Scheduler] io.fromSchedulers.wakeupVec: ${io.fromSchedulers.wakeupVec.map(x => backendParams.getExuName(x.bits.exuIdx))}") + logger.debug(s"[Scheduler] iqWakeUpInKeys: ${iqWakeUpInMap.keys}") - println(s"[Scheduler] iqWakeUpOutKeys: ${iqWakeUpOutMap.keys}") - println(s"[Scheduler] io.toSchedulers.wakeupVec: ${io.toSchedulers.wakeupVec.map(x => backendParams.getExuName(x.bits.exuIdx))}") + logger.debug(s"[Scheduler] iqWakeUpOutKeys: ${iqWakeUpOutMap.keys}") + logger.debug(s"[Scheduler] io.toSchedulers.wakeupVec: ${io.toSchedulers.wakeupVec.map(x => backendParams.getExuName(x.bits.exuIdx))}") } class SchedulerArithImp(override val wrapper: Scheduler)(implicit params: SchdBlockParams, p: Parameters) @@ -488,7 +488,7 @@ class SchedulerArithImp(override val wrapper: Scheduler)(implicit params: SchdBl with HasPerfEvents { // dontTouch(io.vfWbFuBusyTable) - println(s"[SchedulerArithImp] " + + logger.trace(s"[SchedulerArithImp] " + s"has intBusyTable: ${intBusyTable.nonEmpty}, " + s"has vfBusyTable: ${vfBusyTable.nonEmpty}") @@ -519,7 +519,7 @@ class SchedulerMemImp(override val wrapper: Scheduler)(implicit params: SchdBloc with HasXSParameter with HasPerfEvents { - println(s"[SchedulerMemImp] " + + logger.trace(s"[SchedulerMemImp] " + s"has intBusyTable: ${intBusyTable.nonEmpty}, " + s"has vfBusyTable: ${vfBusyTable.nonEmpty}") @@ -530,11 +530,11 @@ class SchedulerMemImp(override val wrapper: Scheduler)(implicit params: SchdBloc val vecMemIQs = issueQueues.filter(_.params.isVecMemIQ) val (hyuIQs, hyuIQIdxs) = issueQueues.zipWithIndex.filter(_._1.params.HyuCnt > 0).unzip - println(s"[SchedulerMemImp] memAddrIQs.size: ${memAddrIQs.size}, enq.size: ${memAddrIQs.map(_.io.enq.size).sum}") - println(s"[SchedulerMemImp] stAddrIQs.size: ${stAddrIQs.size }, enq.size: ${stAddrIQs.map(_.io.enq.size).sum}") - println(s"[SchedulerMemImp] ldAddrIQs.size: ${ldAddrIQs.size }, enq.size: ${ldAddrIQs.map(_.io.enq.size).sum}") - println(s"[SchedulerMemImp] stDataIQs.size: ${stDataIQs.size }, enq.size: ${stDataIQs.map(_.io.enq.size).sum}") - println(s"[SchedulerMemImp] hyuIQs.size: ${hyuIQs.size }, enq.size: ${hyuIQs.map(_.io.enq.size).sum}") + logger.info(s"memAddrIQs.size: ${memAddrIQs.size}, enq.size: ${memAddrIQs.map(_.io.enq.size).sum}") + logger.info(s"stAddrIQs.size: ${stAddrIQs.size }, enq.size: ${stAddrIQs.map(_.io.enq.size).sum}") + logger.info(s"ldAddrIQs.size: ${ldAddrIQs.size }, enq.size: ${ldAddrIQs.map(_.io.enq.size).sum}") + logger.info(s"stDataIQs.size: ${stDataIQs.size }, enq.size: ${stDataIQs.map(_.io.enq.size).sum}") + logger.info(s"hyuIQs.size: ${hyuIQs.size }, enq.size: ${hyuIQs.map(_.io.enq.size).sum}") require(memAddrIQs.nonEmpty && stDataIQs.nonEmpty) io.toMem.get.loadFastMatch := 0.U.asTypeOf(io.toMem.get.loadFastMatch) // TODO: is still needed? 
@@ -590,8 +590,8 @@ class SchedulerMemImp(override val wrapper: Scheduler)(implicit params: SchdBloc private val staIdxSeq = (stAddrIQs).map(iq => iq.params.idxInSchBlk) private val hyaIdxSeq = (hyuIQs).map(iq => iq.params.idxInSchBlk) - println(s"[SchedulerMemImp] sta iq idx in memSchdBlock: $staIdxSeq") - println(s"[SchedulerMemImp] hya iq idx in memSchdBlock: $hyaIdxSeq") + logger.debug(s"[SchedulerMemImp] sta iq idx in memSchdBlock: $staIdxSeq") + logger.debug(s"[SchedulerMemImp] hya iq idx in memSchdBlock: $hyaIdxSeq") private val staEnqs = stAddrIQs.map(_.io.enq).flatten private val stdEnqs = stDataIQs.map(_.io.enq).flatten.take(staEnqs.size) From f7be5711003c90acd3a61a93af41efd416b06b2f Mon Sep 17 00:00:00 2001 From: Easton Man Date: Tue, 3 Dec 2024 00:13:56 +0800 Subject: [PATCH 4/7] feat(args): add --verbose for debug logging --- src/main/scala/top/ArgParser.scala | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/main/scala/top/ArgParser.scala b/src/main/scala/top/ArgParser.scala index 4a7d76d6f66..cec203c9cb0 100644 --- a/src/main/scala/top/ArgParser.scala +++ b/src/main/scala/top/ArgParser.scala @@ -146,6 +146,9 @@ object ArgParser { case "--firtool-opt" :: option :: tail => firtoolOpts ++= option.split(" ").filter(_.nonEmpty) nextOption(config, tail) + case "--verbose" :: tail => + System.setProperty("LOG_LEVEL", "all") + nextOption(config, tail) case option :: tail => // unknown option, maybe a firrtl option, skip firrtlOpts :+= option From 10824876959c9b383d76facdd939090c438c440e Mon Sep 17 00:00:00 2001 From: Easton Man Date: Tue, 3 Dec 2024 00:33:26 +0800 Subject: [PATCH 5/7] feat(logging): transform more --- .../backend/datapath/BypassNetwork.scala | 16 ++++---- .../xiangshan/backend/datapath/DataPath.scala | 37 ++++++++++--------- .../backend/datapath/WbArbiter.scala | 24 ++++++------ .../backend/dispatch/Dispatch2IqFpImp.scala | 10 ++--- src/main/scala/xiangshan/cache/mmu/TLB.scala | 5 ++- .../xiangshan/cache/mmu/TLBStorage.scala | 2 +- 6 files changed, 49 insertions(+), 45 deletions(-) diff --git a/src/main/scala/xiangshan/backend/datapath/BypassNetwork.scala b/src/main/scala/xiangshan/backend/datapath/BypassNetwork.scala index 8ebb01b937f..13b2453d862 100644 --- a/src/main/scala/xiangshan/backend/datapath/BypassNetwork.scala +++ b/src/main/scala/xiangshan/backend/datapath/BypassNetwork.scala @@ -78,7 +78,7 @@ class BypassNetwork()(implicit p: Parameters, params: BackendParams) extends XSM private val fromDPsRCData: Seq[Vec[UInt]] = io.fromDataPath.rcData.flatten.toSeq private val immInfo = io.fromDataPath.immInfo - println(s"[BypassNetwork] RCData num: ${fromDPsRCData.size}") + logger.debug(s"[BypassNetwork] RCData num: ${fromDPsRCData.size}") // (exuIdx, srcIdx, bypassExuIdx) private val forwardOrBypassValidVec3: MixedVec[Vec[Vec[Bool]]] = MixedVecInit( @@ -92,7 +92,7 @@ class BypassNetwork()(implicit p: Parameters, params: BackendParams) extends XSM } m := vecMask } - println(s"[BypassNetwork] ${x.bits.params.name} numRegSrc: ${x.bits.params.numRegSrc}") + logger.trace(s"[BypassNetwork] ${x.bits.params.name} numRegSrc: ${x.bits.params.numRegSrc}") VecInit(x.bits.l1ExuOH.getOrElse( // TODO: remove tmp max 1 for fake HYU1 VecInit(Seq.fill(x.bits.params.numRegSrc max 1)(VecInit(0.U(ExuVec.width.W).asBools))) @@ -115,7 +115,7 @@ class BypassNetwork()(implicit p: Parameters, params: BackendParams) extends XSM private val vfExuNum = params.vfSchdParams.get.numExu private val memExuNum = params.memSchdParams.get.numExu - println(s"[BypassNetwork] allExuNum: 
${toExus.size} intExuNum: ${intExuNum} fpExuNum: ${fpExuNum} vfExuNum: ${vfExuNum} memExuNum: ${memExuNum}") + logger.trace(s"[BypassNetwork] allExuNum: ${toExus.size} intExuNum: ${intExuNum} fpExuNum: ${fpExuNum} vfExuNum: ${vfExuNum} memExuNum: ${memExuNum}") private val fromDPsHasBypass2Source = fromDPs.filter(x => x.bits.params.isIQWakeUpSource && x.bits.params.writeVfRf && (x.bits.params.isVfExeUnit || x.bits.params.hasLoadExu)).map(_.bits.params.exuIdx) private val fromDPsHasBypass2Sink = fromDPs.filter(x => x.bits.params.isIQWakeUpSink && x.bits.params.readVfRf && (x.bits.params.isVfExeUnit || x.bits.params.isMemExeUnit)).map(_.bits.params.exuIdx) @@ -135,9 +135,9 @@ class BypassNetwork()(implicit p: Parameters, params: BackendParams) extends XSM fromDPsHasBypass2Source.map(x => RegEnable(bypassDataVec(x), bypass2DateEn(x).asBool)) ) - println(s"[BypassNetwork] HasBypass2SourceExuNum: ${fromDPsHasBypass2Source.size} HasBypass2SinkExuNum: ${fromDPsHasBypass2Sink.size} bypass2DataVecSize: ${bypass2DataVec.length}") - println(s"[BypassNetwork] HasBypass2SourceExu: ${fromDPsHasBypass2Source}") - println(s"[BypassNetwork] HasBypass2SinkExu: ${fromDPsHasBypass2Sink}") + logger.trace(s"[BypassNetwork] HasBypass2SourceExuNum: ${fromDPsHasBypass2Source.size} HasBypass2SinkExuNum: ${fromDPsHasBypass2Sink.size} bypass2DataVecSize: ${bypass2DataVec.length}") + logger.trace(s"[BypassNetwork] HasBypass2SourceExu: ${fromDPsHasBypass2Source}") + logger.trace(s"[BypassNetwork] HasBypass2SinkExu: ${fromDPsHasBypass2Sink}") toExus.zip(fromDPs).foreach { case (sink, source) => sink <> source @@ -165,7 +165,7 @@ class BypassNetwork()(implicit p: Parameters, params: BackendParams) extends XSM val readRegCache = if (exuParm.needReadRegCache) exuInput.bits.dataSources(srcIdx).readRegCache else false.B val readImm = if (exuParm.immType.nonEmpty || exuParm.hasLoadExu) exuInput.bits.dataSources(srcIdx).readImm else false.B val bypass2ExuIdx = fromDPsHasBypass2Sink.indexOf(exuIdx) - println(s"${exuParm.name}: bypass2ExuIdx is ${bypass2ExuIdx}") + logger.trace(s"${exuParm.name}: bypass2ExuIdx is ${bypass2ExuIdx}") val readBypass2 = if (bypass2ExuIdx >= 0) dataSource.readBypass2 else false.B src := Mux1H( Seq( @@ -200,7 +200,7 @@ class BypassNetwork()(implicit p: Parameters, params: BackendParams) extends XSM fromExus.zip(bypassDataVec).filter(_._1.bits.params.needWriteRegCache).map(_._2) ) - println(s"[BypassNetwork] WriteRegCacheExuNum: ${forwardIntWenVec.size}") + logger.debug(s"[BypassNetwork] WriteRegCacheExuNum: ${forwardIntWenVec.size}") io.toDataPath.zipWithIndex.foreach{ case (x, i) => x.wen := bypassIntWenVec(i) diff --git a/src/main/scala/xiangshan/backend/datapath/DataPath.scala b/src/main/scala/xiangshan/backend/datapath/DataPath.scala index 3ad5ef5f0ca..39f84b751c5 100644 --- a/src/main/scala/xiangshan/backend/datapath/DataPath.scala +++ b/src/main/scala/xiangshan/backend/datapath/DataPath.scala @@ -3,6 +3,7 @@ package xiangshan.backend.datapath import org.chipsalliance.cde.config.Parameters import chisel3._ import chisel3.util._ +import com.typesafe.scalalogging.LazyLogging import difftest.{DiffArchFpRegState, DiffArchIntRegState, DiffArchVecRegState, DifftestModule} import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp} import utility._ @@ -21,18 +22,18 @@ import xiangshan.backend.regcache._ import xiangshan.backend.fu.FuType.is0latency import xiangshan.mem.{LqPtr, SqPtr} -class DataPath(params: BackendParams)(implicit p: Parameters) extends LazyModule { +class DataPath(params: 
BackendParams)(implicit p: Parameters) extends LazyModule with LazyLogging {
override def shouldBeInlined: Boolean = false
private implicit val dpParams: BackendParams = params
lazy val module = new DataPathImp(this)
- println(s"[DataPath] Preg Params: ")
- println(s"[DataPath] Int R(${params.getRfReadSize(IntData())}), W(${params.getRfWriteSize(IntData())}) ")
- println(s"[DataPath] Fp R(${params.getRfReadSize(FpData())}), W(${params.getRfWriteSize(FpData())}) ")
- println(s"[DataPath] Vf R(${params.getRfReadSize(VecData())}), W(${params.getRfWriteSize(VecData())}) ")
- println(s"[DataPath] V0 R(${params.getRfReadSize(V0Data())}), W(${params.getRfWriteSize(V0Data())}) ")
- println(s"[DataPath] Vl R(${params.getRfReadSize(VlData())}), W(${params.getRfWriteSize(VlData())}) ")
+ logger.debug(s"[DataPath] Preg Params: ")
+ logger.debug(s"[DataPath] Int R(${params.getRfReadSize(IntData())}), W(${params.getRfWriteSize(IntData())}) ")
+ logger.debug(s"[DataPath] Fp R(${params.getRfReadSize(FpData())}), W(${params.getRfWriteSize(FpData())}) ")
+ logger.debug(s"[DataPath] Vf R(${params.getRfReadSize(VecData())}), W(${params.getRfWriteSize(VecData())}) ")
+ logger.debug(s"[DataPath] V0 R(${params.getRfReadSize(V0Data())}), W(${params.getRfWriteSize(V0Data())}) ")
+ logger.debug(s"[DataPath] Vl R(${params.getRfReadSize(VlData())}), W(${params.getRfWriteSize(VlData())}) ")
}

class DataPathImp(override val wrapper: DataPath)(implicit p: Parameters, params: BackendParams)
@@ -46,8 +47,8 @@ class DataPathImp(override val wrapper: DataPath)(implicit p: Parameters, params
private val (fromVfIQ, toVfIQ, toVfExu ) = (io.fromVfIQ, io.toVfIQ, io.toVecExu)
private val (fromVecExcp, toVecExcp) = (io.fromVecExcpMod, io.toVecExcpMod)
- println(s"[DataPath] IntIQ(${fromIntIQ.size}), FpIQ(${fromFpIQ.size}), VecIQ(${fromVfIQ.size}), MemIQ(${fromMemIQ.size})")
- println(s"[DataPath] IntExu(${fromIntIQ.map(_.size).sum}), FpExu(${fromFpIQ.map(_.size).sum}), VecExu(${fromVfIQ.map(_.size).sum}), MemExu(${fromMemIQ.map(_.size).sum})")
+ logger.debug(s"[DataPath] IntIQ(${fromIntIQ.size}), FpIQ(${fromFpIQ.size}), VecIQ(${fromVfIQ.size}), MemIQ(${fromMemIQ.size})")
+ logger.debug(s"[DataPath] IntExu(${fromIntIQ.map(_.size).sum}), FpExu(${fromFpIQ.map(_.size).sum}), VecExu(${fromVfIQ.map(_.size).sum}), MemExu(${fromMemIQ.map(_.size).sum})")
// just refences for convience
private val fromIQ: Seq[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] = (fromIntIQ ++ fromFpIQ ++ fromVfIQ ++ fromMemIQ).toSeq
@@ -442,7 +443,7 @@ class DataPathImp(override val wrapper: DataPath)(implicit p: Parameters, params
addr := io.diffVlRat.get
}
- println(s"[DataPath] " +
+ logger.trace(s"[DataPath] " +
s"has intDiffRead: ${intDiffRead.nonEmpty}, " +
s"has fpDiffRead: ${fpDiffRead.nonEmpty}, " +
s"has vecDiffRead: ${vfDiffRead.nonEmpty}, " +
@@ -466,7 +467,7 @@ class DataPathImp(override val wrapper: DataPath)(implicit p: Parameters, params
fromMemIQ.flatten.filter(_.bits.exuParams.numIntSrc > 0).flatMap(IssueBundle2RCReadPort(_))
private val regCacheReadData = regCache.io.readPorts.map(_.data)
- println(s"[DataPath] regCache readPorts size: ${regCache.io.readPorts.size}, regCacheReadReq size: ${regCacheReadReq.size}")
+ logger.debug(s"[DataPath] regCache readPorts size: ${regCache.io.readPorts.size}, regCacheReadReq size: ${regCacheReadReq.size}")
require(regCache.io.readPorts.size == regCacheReadReq.size, "reg cache's readPorts size should be equal to regCacheReadReq")
regCache.io.readPorts.zip(regCacheReadReq).foreach{ case (r, req) =>
@@ -485,8 +486,8 @@ class DataPathImp(override val wrapper: DataPath)(implicit p: Parameters, params
s1_data := rdata
}
- println(s"[DataPath] s1_RCReadData.int.size: ${s1_RCReadData.zip(toExu).filter(_._2.map(_.bits.params.isIntExeUnit).reduce(_ || _)).flatMap(_._1).flatten.size}, RCRdata.int.size: ${params.getIntExuRCReadSize}")
- println(s"[DataPath] s1_RCReadData.mem.size: ${s1_RCReadData.zip(toExu).filter(_._2.map(x => x.bits.params.isMemExeUnit && x.bits.params.readIntRf).reduce(_ || _)).flatMap(_._1).flatten.size}, RCRdata.mem.size: ${params.getMemExuRCReadSize}")
+ logger.debug(s"[DataPath] s1_RCReadData.int.size: ${s1_RCReadData.zip(toExu).filter(_._2.map(_.bits.params.isIntExeUnit).reduce(_ || _)).flatMap(_._1).flatten.size}, RCRdata.int.size: ${params.getIntExuRCReadSize}")
+ logger.debug(s"[DataPath] s1_RCReadData.mem.size: ${s1_RCReadData.zip(toExu).filter(_._2.map(x => x.bits.params.isMemExeUnit && x.bits.params.readIntRf).reduce(_ || _)).flatMap(_._1).flatten.size}, RCRdata.mem.size: ${params.getMemExuRCReadSize}")
io.toWakeupQueueRCIdx := regCache.io.toWakeupQueueRCIdx
io.toBypassNetworkRCData := s1_RCReadData
@@ -520,7 +521,7 @@ class DataPathImp(override val wrapper: DataPath)(implicit p: Parameters, params
val rfrPortConfigs = schdParams.map(_.issueBlockParams).flatten.map(_.exuBlockParams.map(_.rfrPortConfigs))
- println(s"[DataPath] s1_intPregRData.flatten.flatten.size: ${s1_intPregRData.flatten.flatten.size}, intRfRdata.size: ${intRfRdata.size}")
+ logger.trace(s"[DataPath] s1_intPregRData.flatten.flatten.size: ${s1_intPregRData.flatten.flatten.size}, intRfRdata.size: ${intRfRdata.size}")
s1_intPregRData.foreach(_.foreach(_.foreach(_ := 0.U)))
s1_intPregRData.zip(rfrPortConfigs).foreach { case (iqRdata, iqCfg) =>
iqRdata.zip(iqCfg).foreach { case (iuRdata, iuCfg) =>
@@ -530,7 +531,7 @@ class DataPathImp(override val wrapper: DataPath)(implicit p: Parameters, params
}
}
- println(s"[DataPath] s1_fpPregRData.flatten.flatten.size: ${s1_fpPregRData.flatten.flatten.size}, fpRfRdata.size: ${fpRfRdata.size}")
+ logger.trace(s"[DataPath] s1_fpPregRData.flatten.flatten.size: ${s1_fpPregRData.flatten.flatten.size}, fpRfRdata.size: ${fpRfRdata.size}")
s1_fpPregRData.foreach(_.foreach(_.foreach(_ := 0.U)))
s1_fpPregRData.zip(rfrPortConfigs).foreach { case (iqRdata, iqCfg) =>
iqRdata.zip(iqCfg).foreach { case (iuRdata, iuCfg) =>
@@ -540,7 +541,7 @@ class DataPathImp(override val wrapper: DataPath)(implicit p: Parameters, params
}
}
- println(s"[DataPath] s1_vfPregRData.flatten.flatten.size: ${s1_vfPregRData.flatten.flatten.size}, vfRfRdata.size: ${vfRfRdata.size}")
+ logger.trace(s"[DataPath] s1_vfPregRData.flatten.flatten.size: ${s1_vfPregRData.flatten.flatten.size}, vfRfRdata.size: ${vfRfRdata.size}")
s1_vfPregRData.foreach(_.foreach(_.foreach(_ := 0.U)))
s1_vfPregRData.zip(rfrPortConfigs).foreach{ case(iqRdata, iqCfg) =>
iqRdata.zip(iqCfg).foreach{ case(iuRdata, iuCfg) =>
@@ -550,7 +551,7 @@ class DataPathImp(override val wrapper: DataPath)(implicit p: Parameters, params
}
}
- println(s"[DataPath] s1_v0PregRData.flatten.flatten.size: ${s1_v0PregRData.flatten.flatten.size}, v0RfRdata.size: ${v0RfRdata.size}")
+ logger.trace(s"[DataPath] s1_v0PregRData.flatten.flatten.size: ${s1_v0PregRData.flatten.flatten.size}, v0RfRdata.size: ${v0RfRdata.size}")
s1_v0PregRData.foreach(_.foreach(_.foreach(_ := 0.U)))
s1_v0PregRData.zip(rfrPortConfigs).foreach{ case(iqRdata, iqCfg) =>
iqRdata.zip(iqCfg).foreach{ case(iuRdata, iuCfg) =>
@@ -560,7 +561,7 @@ class DataPathImp(override val wrapper: DataPath)(implicit p: Parameters, params
}
}
- println(s"[DataPath] s1_vlPregRData.flatten.flatten.size: ${s1_vlPregRData.flatten.flatten.size}, vlRfRdata.size: ${vlRfRdata.size}")
+ logger.trace(s"[DataPath] s1_vlPregRData.flatten.flatten.size: ${s1_vlPregRData.flatten.flatten.size}, vlRfRdata.size: ${vlRfRdata.size}")
s1_vlPregRData.foreach(_.foreach(_.foreach(_ := 0.U)))
s1_vlPregRData.zip(rfrPortConfigs).foreach{ case(iqRdata, iqCfg) =>
iqRdata.zip(iqCfg).foreach{ case(iuRdata, iuCfg) =>
diff --git a/src/main/scala/xiangshan/backend/datapath/WbArbiter.scala b/src/main/scala/xiangshan/backend/datapath/WbArbiter.scala
index e6f0394e294..1fba0c45c3b 100644
--- a/src/main/scala/xiangshan/backend/datapath/WbArbiter.scala
+++ b/src/main/scala/xiangshan/backend/datapath/WbArbiter.scala
@@ -138,7 +138,7 @@ class WbDataPath(params: BackendParams)(implicit p: Parameters) extends XSModule
}
val wbReplaceVld = fromExuPre
val vldIdx: Seq[Int] = vldMgu.map(x => fromExuPre.indexWhere(_.bits.params == x.params))
- println("vldIdx: " + vldIdx)
+ logger.trace("vldIdx: " + vldIdx)
vldIdx.zip(vldMgu).foreach{ case (id, wb) =>
wbReplaceVld.update(id, wb.io.writebackAfterMerge)
}
@@ -216,7 +216,7 @@ class WbDataPath(params: BackendParams)(implicit p: Parameters) extends XSModule
intArbiterInput.bits := RegEnable(exuOut.bits, exuOut.valid)
}
- println(s"[WbDataPath] exu: ${exuOut.bits.params.exuIdx}, uncertain: ${exuOut.bits.params.hasUncertainLatency}, certain: ${exuOut.bits.params.latencyCertain}")
+ logger.debug(s"[WbDataPath] exu: ${exuOut.bits.params.exuIdx}, uncertain: ${exuOut.bits.params.hasUncertainLatency}, certain: ${exuOut.bits.params.latencyCertain}")
// only EXUs with uncertain latency need result of arbiter
// the result data can be maintained until getting success in arbiter
@@ -254,31 +254,31 @@ class WbDataPath(params: BackendParams)(implicit p: Parameters) extends XSModule
v0ArbiterInputsWireN.foreach(_.ready := false.B)
vlArbiterInputsWireN.foreach(_.ready := false.B)
- println(s"[WbDataPath] write int preg: " +
+ logger.debug(s"[WbDataPath] write int preg: " +
s"IntExu(${io.fromIntExu.flatten.count(_.bits.params.writeIntRf)}) " +
s"FpExu(${io.fromFpExu.flatten.count(_.bits.params.writeIntRf)}) " +
s"VfExu(${io.fromVfExu.flatten.count(_.bits.params.writeIntRf)}) " +
s"MemExu(${io.fromMemExu.flatten.count(_.bits.params.writeIntRf)})"
)
- println(s"[WbDataPath] write fp preg: " +
+ logger.debug(s"[WbDataPath] write fp preg: " +
s"IntExu(${io.fromIntExu.flatten.count(_.bits.params.writeFpRf)}) " +
s"FpExu(${io.fromFpExu.flatten.count(_.bits.params.writeFpRf)}) " +
s"VfExu(${io.fromVfExu.flatten.count(_.bits.params.writeFpRf)}) " +
s"MemExu(${io.fromMemExu.flatten.count(_.bits.params.writeFpRf)})"
)
- println(s"[WbDataPath] write vf preg: " +
+ logger.debug(s"[WbDataPath] write vf preg: " +
s"IntExu(${io.fromIntExu.flatten.count(_.bits.params.writeVfRf)}) " +
s"FpExu(${io.fromFpExu.flatten.count(_.bits.params.writeVfRf)}) " +
s"VfExu(${io.fromVfExu.flatten.count(_.bits.params.writeVfRf)}) " +
s"MemExu(${io.fromMemExu.flatten.count(_.bits.params.writeVfRf)})"
)
- println(s"[WbDataPath] write v0 preg: " +
+ logger.debug(s"[WbDataPath] write v0 preg: " +
s"IntExu(${io.fromIntExu.flatten.count(_.bits.params.writeV0Rf)}) " +
s"FpExu(${io.fromFpExu.flatten.count(_.bits.params.writeV0Rf)}) " +
s"VfExu(${io.fromVfExu.flatten.count(_.bits.params.writeV0Rf)}) " +
s"MemExu(${io.fromMemExu.flatten.count(_.bits.params.writeV0Rf)})"
)
- println(s"[WbDataPath] write vl preg: " +
+ logger.debug(s"[WbDataPath] write vl preg: " +
s"IntExu(${io.fromIntExu.flatten.count(_.bits.params.writeVlRf)}) " +
s"FpExu(${io.fromFpExu.flatten.count(_.bits.params.writeVlRf)}) " +
s"VfExu(${io.fromVfExu.flatten.count(_.bits.params.writeVlRf)}) " +
@@ -291,11 +291,11 @@ class WbDataPath(params: BackendParams)(implicit p: Parameters) extends XSModule
private val vfWbArbiter = Module(new RealWBCollideChecker(params.getVfWbArbiterParams))
private val v0WbArbiter = Module(new RealWBCollideChecker(params.getV0WbArbiterParams))
private val vlWbArbiter = Module(new RealWBCollideChecker(params.getVlWbArbiterParams))
- println(s"[WbDataPath] int preg write back port num: ${intWbArbiter.io.out.size}, active port: ${intWbArbiter.io.inGroup.keys.toSeq.sorted}")
- println(s"[WbDataPath] fp preg write back port num: ${fpWbArbiter.io.out.size}, active port: ${fpWbArbiter.io.inGroup.keys.toSeq.sorted}")
- println(s"[WbDataPath] vf preg write back port num: ${vfWbArbiter.io.out.size}, active port: ${vfWbArbiter.io.inGroup.keys.toSeq.sorted}")
- println(s"[WbDataPath] v0 preg write back port num: ${v0WbArbiter.io.out.size}, active port: ${v0WbArbiter.io.inGroup.keys.toSeq.sorted}")
- println(s"[WbDataPath] vl preg write back port num: ${vlWbArbiter.io.out.size}, active port: ${vlWbArbiter.io.inGroup.keys.toSeq.sorted}")
+ logger.debug(s"[WbDataPath] int preg write back port num: ${intWbArbiter.io.out.size}, active port: ${intWbArbiter.io.inGroup.keys.toSeq.sorted}")
+ logger.debug(s"[WbDataPath] fp preg write back port num: ${fpWbArbiter.io.out.size}, active port: ${fpWbArbiter.io.inGroup.keys.toSeq.sorted}")
+ logger.debug(s"[WbDataPath] vf preg write back port num: ${vfWbArbiter.io.out.size}, active port: ${vfWbArbiter.io.inGroup.keys.toSeq.sorted}")
+ logger.debug(s"[WbDataPath] v0 preg write back port num: ${v0WbArbiter.io.out.size}, active port: ${v0WbArbiter.io.inGroup.keys.toSeq.sorted}")
+ logger.debug(s"[WbDataPath] vl preg write back port num: ${vlWbArbiter.io.out.size}, active port: ${vlWbArbiter.io.inGroup.keys.toSeq.sorted}")
// module assign
intWbArbiter.io.flush <> io.flush
diff --git a/src/main/scala/xiangshan/backend/dispatch/Dispatch2IqFpImp.scala b/src/main/scala/xiangshan/backend/dispatch/Dispatch2IqFpImp.scala
index ed3ed0b2cde..7052da69c76 100644
--- a/src/main/scala/xiangshan/backend/dispatch/Dispatch2IqFpImp.scala
+++ b/src/main/scala/xiangshan/backend/dispatch/Dispatch2IqFpImp.scala
@@ -32,11 +32,11 @@ class Dispatch2IqFpImp(override val wrapper: Dispatch2Iq)(implicit p: Parameters
}
val fuConfigOnlyOneIQ = fuConfigMapIQ.filter(x => x._2.count(_ == true) == 1).map(_._1)
val fuConfigMultiIQ = fuConfigMapIQ.filter(x => x._2.count(_ == true) > 1).map(_._1)
- println(s"[Dispatch2IqFpImp] IQFuConfigs: ${IQFuConfigs.map(_.map(_.name))}")
- println(s"[Dispatch2IqFpImp] allFuConfigs: ${allFuConfigs.map(_.name)}")
- println(s"[Dispatch2IqFpImp] fuConfigMapIQ: ${fuConfigMapIQ.map(x => (x._1.name, x._2))}")
- println(s"[Dispatch2IqFpImp] fuConfigOnlyOneIQ: ${fuConfigOnlyOneIQ.map(_.name)}")
- println(s"[Dispatch2IqFpImp] fuConfigMultiIQ: ${fuConfigMultiIQ.map(_.name)}")
+ logger.debug(s"[Dispatch2IqFpImp] IQFuConfigs: ${IQFuConfigs.map(_.map(_.name))}")
+ logger.debug(s"[Dispatch2IqFpImp] allFuConfigs: ${allFuConfigs.map(_.name)}")
+ logger.debug(s"[Dispatch2IqFpImp] fuConfigMapIQ: ${fuConfigMapIQ.map(x => (x._1.name, x._2))}")
+ logger.debug(s"[Dispatch2IqFpImp] fuConfigOnlyOneIQ: ${fuConfigOnlyOneIQ.map(_.name)}")
+ logger.debug(s"[Dispatch2IqFpImp] fuConfigMultiIQ: ${fuConfigMultiIQ.map(_.name)}")
val uopsInFuType = VecInit(uopsIn.map(x => Mux(x.valid, x.bits.fuType, 0.U.asTypeOf(x.bits.fuType))))
val inFuTypeIsMultiIQ = VecInit(uopsInFuType.map{ case x => VecInit(fuConfigMultiIQ.map{ case y => x(y.fuType.id)})
diff --git a/src/main/scala/xiangshan/cache/mmu/TLB.scala b/src/main/scala/xiangshan/cache/mmu/TLB.scala
index 0a9da82b12f..06aa721cebf 100644
--- a/src/main/scala/xiangshan/cache/mmu/TLB.scala
+++ b/src/main/scala/xiangshan/cache/mmu/TLB.scala
@@ -610,7 +610,10 @@ class TLB(Width: Int, nRespDups: Int = 1, Block: Seq[Boolean], q: TLBParameters)
}
XSDebug(ptw.resp.valid, p"L2TLB resp:${ptw.resp.bits} (v:${ptw.resp.valid}r:${ptw.resp.ready}) \n")
- println(s"${q.name}: page: ${q.NWays} ${q.Associative} ${q.Replacer.get}")
+ logger.info(s"${q.name}:")
+ logger.info(s" entries: ${q.NWays}")
+ logger.info(s" associative: ${q.Associative}")
+ logger.info(s" replacer: ${q.Replacer.get}")
if (env.EnableDifftest) {
for (i <- 0 until Width) {
diff --git a/src/main/scala/xiangshan/cache/mmu/TLBStorage.scala b/src/main/scala/xiangshan/cache/mmu/TLBStorage.scala
index 84bce40bb19..d8e857d2a30 100644
--- a/src/main/scala/xiangshan/cache/mmu/TLBStorage.scala
+++ b/src/main/scala/xiangshan/cache/mmu/TLBStorage.scala
@@ -263,7 +263,7 @@ class TLBFA(
)
generatePerfEvent()
- println(s"${parentName} tlb_fa: nSets${nSets} nWays:${nWays}")
+ logger.trace(s"${parentName} tlb_fa: nSets${nSets} nWays:${nWays}")
}
class TLBFakeFA(

From a43e56dfd67366cc465547e0324e61693e7da6f6 Mon Sep 17 00:00:00 2001
From: Easton Man
Date: Tue, 3 Dec 2024 13:42:27 +0800
Subject: [PATCH 6/7] feat(logging): add switch VERBOSE in Makefile

---
 Makefile | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/Makefile b/Makefile
index 4bb99ed4ba8..f9f2d6280d8 100644
--- a/Makefile
+++ b/Makefile
@@ -112,6 +112,12 @@ ifeq ($(WITH_CONSTANTIN),1)
override SIM_ARGS += --with-constantin
endif
+# verbose compile-time logging
+ifeq ($(VERBOSE),1)
+RELEASE_ARGS += --verbose
+DEBUG_ARGS += --verbose
+endif
+
# emu for the release version
RELEASE_ARGS += --fpga-platform --disable-all --remove-assert --reset-gen --firtool-opt --ignore-read-enable-mem
DEBUG_ARGS += --enable-difftest

From d7fa5d3f6152e2d9b4482e41eccfee2f8f1f3483 Mon Sep 17 00:00:00 2001
From: Easton Man
Date: Tue, 3 Dec 2024 15:00:09 +0800
Subject: [PATCH 7/7] feat(logging): transform more

---
 src/main/resources/logback.xml | 2 +-
 src/main/scala/xiangshan/backend/MemBlock.scala | 2 +-
 .../xiangshan/backend/fu/NewCSR/CommitIDModule.scala | 7 ++++---
 .../xiangshan/backend/fu/vector/ByteMaskTailGen.scala | 9 +++++----
 .../scala/xiangshan/backend/issue/ImmExtractor.scala | 5 +++--
 src/main/scala/xiangshan/backend/regcache/RegCache.scala | 6 +++---
 src/main/scala/xiangshan/mem/lsqueue/FreeList.scala | 2 +-
 7 files changed, 18 insertions(+), 15 deletions(-)

diff --git a/src/main/resources/logback.xml b/src/main/resources/logback.xml
index d361853bbc4..263c30754bb 100644
--- a/src/main/resources/logback.xml
+++ b/src/main/resources/logback.xml
@@ -1,4 +1,4 @@
-
+
%gray(%d{yyyy-MM-dd HH:mm:ss}) %highlight(%-5level) %cyan(%-24logger{24}) | %msg%n
diff --git a/src/main/scala/xiangshan/backend/MemBlock.scala b/src/main/scala/xiangshan/backend/MemBlock.scala
index 0dde7f92c88..457e023b182 100644
--- a/src/main/scala/xiangshan/backend/MemBlock.scala
+++ b/src/main/scala/xiangshan/backend/MemBlock.scala
@@ -1924,7 +1924,7 @@ class MemBlockInlinedImp(outer: MemBlockInlined) extends LazyModuleImp(outer)
if (printEventCoding) {
for (((name, inc), i) <- allPerfEvents.zipWithIndex) {
- println("MemBlock perfEvents Set", name, inc, i)
+ logger.trace("MemBlock perfEvents Set", name, inc, i)
}
}
diff --git a/src/main/scala/xiangshan/backend/fu/NewCSR/CommitIDModule.scala b/src/main/scala/xiangshan/backend/fu/NewCSR/CommitIDModule.scala
index e916ff5f542..8163f531306 100644
--- a/src/main/scala/xiangshan/backend/fu/NewCSR/CommitIDModule.scala
+++ b/src/main/scala/xiangshan/backend/fu/NewCSR/CommitIDModule.scala
@@ -1,10 +1,11 @@
package xiangshan.backend.fu.NewCSR
import chisel3._
+import com.typesafe.scalalogging.LazyLogging
import java.util.Properties
-class CommitIDModule(shaWidth: Int) extends Module {
+class CommitIDModule(shaWidth: Int) extends Module with LazyLogging {
val io = IO(new Bundle {
val commitID = Output(UInt(shaWidth.W))
val dirty = Output(Bool())
@@ -16,8 +17,8 @@ class CommitIDModule(shaWidth: Int) extends Module {
val sha = props.get("SHA").asInstanceOf[String].take(shaWidth / 4)
val dirty = props.get("dirty").asInstanceOf[String].toInt
- println(s"[CommitIDModule] SHA=$sha")
- println(s"[CommitIDModule] dirty=$dirty")
+ logger.info(s"commit SHA=$sha")
+ logger.info(s"dirty=${if (dirty == 1) "true" else "false" }")
io.commitID := BigInt(sha, 16).U(shaWidth.W)
io.dirty := dirty.U
diff --git a/src/main/scala/xiangshan/backend/fu/vector/ByteMaskTailGen.scala b/src/main/scala/xiangshan/backend/fu/vector/ByteMaskTailGen.scala
index 31e5cbbec08..09562116d89 100644
--- a/src/main/scala/xiangshan/backend/fu/vector/ByteMaskTailGen.scala
+++ b/src/main/scala/xiangshan/backend/fu/vector/ByteMaskTailGen.scala
@@ -3,6 +3,7 @@ package xiangshan.backend.fu.vector
import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
+import com.typesafe.scalalogging.LazyLogging
import xiangshan.backend.fu.vector.Bundles.{VSew, Vl}
import xiangshan.backend.fu.vector.utils.{MaskExtractor, UIntToContLow0s, UIntToContLow1s}
import utility.XSDebug
@@ -10,12 +11,12 @@ import yunsuan.vector.SewOH
import yunsuan.util.LookupTree
-class ByteMaskTailGenIO(vlen: Int)(implicit p: Parameters) extends Bundle {
+class ByteMaskTailGenIO(vlen: Int)(implicit p: Parameters) extends Bundle with LazyLogging {
private val numBytes = vlen / 8
private val maxVLMUL = 8
private val maxVLMAX = 8 * 16 // TODO: parameterize this
private val elemIdxWidth = log2Up(maxVLMAX + 1)
- println(s"elemIdxWidth: $elemIdxWidth")
+ logger.trace(s"elemIdxWidth: $elemIdxWidth")
val in = Input(new Bundle {
val begin = UInt(elemIdxWidth.W)
@@ -43,7 +44,7 @@ class ByteMaskTailGenIO(vlen: Int)(implicit p: Parameters) extends Bundle {
})
}
-class ByteMaskTailGen(vlen: Int)(implicit p: Parameters) extends Module {
+class ByteMaskTailGen(vlen: Int)(implicit p: Parameters) extends Module with LazyLogging {
require(isPow2(vlen))
private val numBytes = vlen / 8
@@ -52,7 +53,7 @@ class ByteMaskTailGen(vlen: Int)(implicit p: Parameters) extends Module {
private val maxVLMAX = 8 * 16 // TODO: parameterize this
private val elemIdxWidth = log2Up(maxVLMAX + 1)
- println(s"numBytes: ${numBytes}, byteWidth: ${byteWidth}")
+ logger.trace(s"numBytes: ${numBytes}, byteWidth: ${byteWidth}")
val io = IO(new ByteMaskTailGenIO(vlen))
diff --git a/src/main/scala/xiangshan/backend/issue/ImmExtractor.scala b/src/main/scala/xiangshan/backend/issue/ImmExtractor.scala
index 06c6e570300..706f0f5381d 100644
--- a/src/main/scala/xiangshan/backend/issue/ImmExtractor.scala
+++ b/src/main/scala/xiangshan/backend/issue/ImmExtractor.scala
@@ -2,6 +2,7 @@ package xiangshan.backend.issue
import chisel3._
import chisel3.util._
+import com.typesafe.scalalogging.LazyLogging
import fudian.utils.SignExt
import xiangshan.SelImm
import xiangshan.backend.decode.ImmUnion
@@ -19,7 +20,7 @@ class ImmExtractorIO(dataBits: Int) extends Bundle {
})
}
-class ImmExtractor(dataBits: Int, immTypeSet: Set[BigInt]) extends Module {
+class ImmExtractor(dataBits: Int, immTypeSet: Set[BigInt]) extends Module with LazyLogging {
val io = IO(new ImmExtractorIO(dataBits))
val extractMap = Map(
@@ -39,7 +40,7 @@ class ImmExtractor(dataBits: Int, immTypeSet: Set[BigInt]) extends Module {
)
val usedMap: Seq[(BigInt, UInt)] = extractMap.view.filterKeys(x => immTypeSet.contains(x)).toSeq.sortWith(_._1 < _._1)
- println(usedMap)
+ logger.trace(usedMap.toString())
io.out.imm := MuxLookup(io.in.immType, 0.U)(usedMap.map { case (k, v) => (k.U, v) }.toSeq)
}
diff --git a/src/main/scala/xiangshan/backend/regcache/RegCache.scala b/src/main/scala/xiangshan/backend/regcache/RegCache.scala
index 6a37da00802..edd13bdae9e 100644
--- a/src/main/scala/xiangshan/backend/regcache/RegCache.scala
+++ b/src/main/scala/xiangshan/backend/regcache/RegCache.scala
@@ -28,10 +28,10 @@ class RegCache()(implicit p: Parameters, params: BackendParams) extends XSModule
val io = IO(new RegCacheIO())
- println(s"[RegCache] readPorts: ${params.getIntExuRCReadSize} + ${params.getMemExuRCReadSize}, " +
+ logger.debug(s"readPorts: ${params.getIntExuRCReadSize} + ${params.getMemExuRCReadSize}, " +
s"writePorts: ${params.getIntExuRCWriteSize} + ${params.getMemExuRCWriteSize}")
-
- println(s"[RegCache] dataWidth: ${params.intSchdParams.get.rfDataWidth}, addrWidth: ${RegCacheIdxWidth}, tagWidth: ${params.intSchdParams.get.pregIdxWidth}")
+ logger.debug(s"dataWidth: ${params.intSchdParams.get.rfDataWidth}, " +
+ s"addrWidth: ${RegCacheIdxWidth}, tagWidth: ${params.intSchdParams.get.pregIdxWidth}")
require(RegCacheIdxWidth == (log2Up(IntRegCacheSize) + 1), "IntRegCache should be half of the whole RegCache")
require(RegCacheIdxWidth == (log2Up(MemRegCacheSize) + 1), "MemRegCache should be half of the whole RegCache")
diff --git a/src/main/scala/xiangshan/mem/lsqueue/FreeList.scala b/src/main/scala/xiangshan/mem/lsqueue/FreeList.scala
index 4861e70bc88..185f23a65e0 100644
--- a/src/main/scala/xiangshan/mem/lsqueue/FreeList.scala
+++ b/src/main/scala/xiangshan/mem/lsqueue/FreeList.scala
@@ -38,7 +38,7 @@ class FreeList(size: Int, allocWidth: Int, freeWidth: Int, enablePreAlloc: Boole
val empty = Output(Bool())
})
- println(s"FreeList: $moduleName, size " + size)
+ logger.info(s"FreeList: $moduleName, size " + size)
val freeList = RegInit(VecInit( // originally {0, 1, ..., size - 1} are free.