Commit

Merge remote-tracking branch 'origin/main' into to/#176-ts-meta-data
# Conflicts:
#	src/main/scala/edu/ie3/simbench/convert/GridConverter.scala
#	src/main/scala/edu/ie3/simbench/main/RunSimbench.scala
t-ober committed Jun 26, 2024
2 parents 660ca41 + 91091ea commit 423beeb
Showing 23 changed files with 227 additions and 177 deletions.
2 changes: 1 addition & 1 deletion .github/dependabot.yml
@@ -18,4 +18,4 @@ updates:
           - ">= 1.5.a, < 1.6"
       - dependency-name: org.scalatest:scalatest_2.13
         versions:
-          - "3.3.0-SNAP2"
+          - "3.3.0-SNAP+"
1 change: 0 additions & 1 deletion Jenkinsfile
@@ -262,7 +262,6 @@ def getFeatureBranchProps() {
 def getMasterBranchProps() {
   properties([parameters(
       [string(defaultValue: '', description: '', name: 'release', trim: true)]),
-      [$class: 'RebuildSettings', autoRebuild: false, rebuildDisabled: false],
       [$class: 'ThrottleJobProperty', categories: [], limitOneJobWithMatchingParams: false, maxConcurrentPerNode: 0, maxConcurrentTotal: 0, paramsToUseForLimit: '', throttleEnabled: true, throttleOption: 'project']
   ])
 }
28 changes: 14 additions & 14 deletions build.gradle
@@ -1,14 +1,14 @@
 plugins {
-  id "com.jfrog.artifactory" version "4.31.9"
+  id "com.jfrog.artifactory" version "5.1.11"
   id 'groovy' // groovy support
   id 'java' // java support
   id 'scala' // scala support
   id 'pmd' // code check, working on source code
-  id 'com.diffplug.spotless' version '6.19.0' // code format
-  id "de.undercouch.download" version "5.4.0" // downloads plugin
+  id 'com.diffplug.spotless' version '6.25.0' // code format
+  id "de.undercouch.download" version "5.6.0" // downloads plugin
   id 'jacoco' // java code coverage plugin
-  id "org.sonarqube" version "4.2.0.3129" // sonarqube
-  id 'org.scoverage' version '8.0.2' // Code coverage plugin for scala
+  id "org.sonarqube" version "5.0.0.4638" // sonarqube
+  id 'org.scoverage' version '8.1' // Code coverage plugin for scala
   id "com.github.maiflai.scalatest" version "0.32" // run scalatest without specific task
 }

@@ -17,9 +17,9 @@ ext {
   javaVersion = JavaVersion.VERSION_17

   scalaVersion = '2.13'
-  scalaBinaryVersion = '2.13.6'
+  scalaBinaryVersion = '2.13.14'
   tscfgVersion = '0.9.986'
-  slf4jVersion = '2.0.7'
+  slf4jVersion = '2.0.13'

   scriptsLocation = 'gradle' + File.separator + 'scripts' + File.separator // location of script plugins
 }
@@ -46,26 +46,26 @@ repositories {

 dependencies {
   /* PowerSystemDataModel */
-  implementation('com.github.ie3-institute:PowerSystemDataModel:3.0.0') {
+  implementation('com.github.ie3-institute:PowerSystemDataModel:5.0.1') {
     exclude group: 'org.apache.logging.log4j'
     exclude group: 'org.slf4j'
     /* Exclude our own nested dependencies */
     exclude group: 'com.github.ie3-institute'
   }

   /* util functions */
-  implementation('com.github.ie3-institute:PowerSystemUtils:2.0') {
+  implementation('com.github.ie3-institute:PowerSystemUtils:2.2.1') {
     exclude group: 'org.apache.logging.log4j'
     exclude group: 'org.slf4j'
     /* Exclude our own nested dependencies */
     exclude group: 'com.github.ie3-institute'
   }

-  implementation 'org.codehaus.groovy:groovy:3.0.17'
+  implementation 'org.codehaus.groovy:groovy:3.0.21'

-  implementation 'tech.units:indriya:2.1.4'
+  implementation 'tech.units:indriya:2.2'
   implementation 'org.locationtech.jts:jts-core:1.19.0'
-  implementation 'commons-io:commons-io:2.13.0'
+  implementation 'commons-io:commons-io:2.16.1'

   // logging
   implementation 'org.apache.logging.log4j:log4j-api:+' // log4j
@@ -81,10 +81,10 @@ dependencies {
   implementation 'org.scala-lang.modules:scala-parallel-collections_2.13:1.0.4'

   // TEST Scala //
-  testImplementation "org.scalatest:scalatest_${scalaVersion}:3.2.10"
+  testImplementation "org.scalatest:scalatest_${scalaVersion}:3.2.19"
   testImplementation 'com.vladsch.flexmark:flexmark-all:0.64.8'
   testImplementation "org.pegdown:pegdown:1.6.0" // HTML report for scalatest
-  implementation 'org.mockito:mockito-core:5.3.1' // mocking framework
+  implementation 'org.mockito:mockito-core:5.12.0' // mocking framework

   // config //
   implementation 'com.typesafe:config:+'
1 change: 1 addition & 0 deletions gradle/scripts/scoverage.gradle
@@ -3,6 +3,7 @@
  * https://github.com/scoverage/gradle-scoverage/issues/109 for details
  */
 scoverage {
+  scoverageVersion = "2.1.1"
   scoverageScalaVersion = scalaBinaryVersion
   coverageOutputHTML = false
   coverageOutputXML = true
22 changes: 6 additions & 16 deletions src/main/scala/edu/ie3/simbench/convert/GridConverter.scala
@@ -3,6 +3,7 @@ package edu.ie3.simbench.convert
 import com.typesafe.scalalogging.LazyLogging
 import edu.ie3.datamodel.io.source.TimeSeriesMappingSource
 import edu.ie3.datamodel.io.source.TimeSeriesMappingSource.MappingEntry
+import edu.ie3.datamodel.models.input.NodeInput
 import edu.ie3.datamodel.models.input.connector.{
   LineInput,
   SwitchInput,
@@ -20,10 +21,9 @@ import edu.ie3.datamodel.models.input.graphics.{
   NodeGraphicInput
 }
 import edu.ie3.datamodel.models.input.system._
-import edu.ie3.datamodel.models.input.NodeInput
 import edu.ie3.datamodel.models.result.NodeResult
 import edu.ie3.datamodel.models.timeseries.individual.IndividualTimeSeries
-import edu.ie3.datamodel.models.value.{PValue, SValue, Value}
+import edu.ie3.datamodel.models.value.{PValue, SValue}
 import edu.ie3.simbench.convert.NodeConverter.AttributeOverride.{
   JoinOverride,
   SubnetOverride
@@ -33,20 +33,11 @@ import edu.ie3.simbench.convert.types.{
   Transformer2wTypeConverter
 }
 import edu.ie3.simbench.exception.ConversionException
-import edu.ie3.simbench.model.datamodel.{
-  GridModel,
-  Line,
-  Node,
-  NodePFResult,
-  Switch,
-  Transformer2W,
-  Transformer3W
-}
+import edu.ie3.simbench.model.datamodel._

 import java.util.UUID
 import scala.annotation.tailrec
-import scala.jdk.CollectionConverters._
 import scala.collection.parallel.CollectionConverters._
+import scala.jdk.CollectionConverters._

 case object GridConverter extends LazyLogging {
@@ -654,7 +645,7 @@ case object GridConverter extends LazyLogging {
     val mappingEntries = participantsToTimeSeries.map {
       case (model, (timeSeries, id)) =>
         new TimeSeriesMappingSource.MappingEntry(
-          UUID.randomUUID(),
           model.getUuid,
           timeSeries.getUuid
         )
@@ -677,8 +668,7 @@
         loadsToTimeSeries.keySet.asJava,
         Set.empty[PvInput].asJava,
         Set.empty[StorageInput].asJava,
-        Set.empty[WecInput].asJava,
-        Set.empty[EmInput].asJava
+        Set.empty[WecInput].asJava
       ),
       timeSeries,
       mappingEntries,
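The dropped `UUID.randomUUID()` argument follows PowerSystemDataModel 5.x, where a `MappingEntry` no longer carries its own entity UUID and is reduced to the participant-to-time-series link. A minimal sketch of building such an entry against the new API (the record-style accessors are inferred from the spec change further down, which replaces `getParticipant` with `participant()`):

```scala
import edu.ie3.datamodel.io.source.TimeSeriesMappingSource
import java.util.UUID

// Hypothetical stand-ins for a converted participant and its time series.
val participantUuid = UUID.randomUUID()
val timeSeriesUuid = UUID.randomUUID()

// PSDM >= 5.x: the entry is just the (participant, timeSeries) pair;
// no third entity UUID is generated any more.
val entry =
  new TimeSeriesMappingSource.MappingEntry(participantUuid, timeSeriesUuid)

assert(entry.participant() == participantUuid)
assert(entry.timeSeries() == timeSeriesUuid)
```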
3 changes: 2 additions & 1 deletion src/main/scala/edu/ie3/simbench/convert/LoadConverter.scala
@@ -4,7 +4,7 @@ import java.util.{Locale, UUID}
 import edu.ie3.datamodel.models.OperationTime
 import edu.ie3.datamodel.models.input.system.LoadInput
 import edu.ie3.datamodel.models.input.system.characteristic.CosPhiFixed
-import edu.ie3.datamodel.models.input.{NodeInput, OperatorInput}
+import edu.ie3.datamodel.models.input.{EmInput, NodeInput, OperatorInput}
 import edu.ie3.datamodel.models.profile.LoadProfile.DefaultLoadProfiles
 import edu.ie3.datamodel.models.timeseries.individual.IndividualTimeSeries
 import edu.ie3.datamodel.models.value.SValue
@@ -76,6 +76,7 @@ case object LoadConverter extends ShuntConverter {
       OperationTime.notLimited(),
       node,
       new CosPhiFixed(varCharacteristicString),
+      null,
       DefaultLoadProfiles.NO_LOAD_PROFILE,
       false,
       eCons,
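The added `null` fills the energy-management slot that PSDM 5.x introduces into system participant constructors: a load may now reference a controlling `EmInput`, and SimBench data simply has none. A hedged sketch of the full constructor call (argument order is taken from the diff above plus the pre-existing converter arguments `eCons`, `sRated` and `cosphi`; treat it as an assumption, not the verified PSDM signature):

```scala
import java.util.UUID
import javax.measure.quantity.{Energy, Power}
import edu.ie3.datamodel.models.OperationTime
import edu.ie3.datamodel.models.input.system.LoadInput
import edu.ie3.datamodel.models.input.system.characteristic.CosPhiFixed
import edu.ie3.datamodel.models.input.{NodeInput, OperatorInput}
import edu.ie3.datamodel.models.profile.LoadProfile.DefaultLoadProfiles
import tech.units.indriya.ComparableQuantity

// Sketch: convert a SimBench load to a LoadInput without an EM unit.
def toLoadInput(
    id: String,
    node: NodeInput,
    varCharacteristicString: String,
    eCons: ComparableQuantity[Energy],
    sRated: ComparableQuantity[Power],
    cosphi: Double
): LoadInput =
  new LoadInput(
    UUID.randomUUID(),
    id,
    OperatorInput.NO_OPERATOR_ASSIGNED,
    OperationTime.notLimited(),
    node,
    new CosPhiFixed(varCharacteristicString),
    null, // EmInput reference, new in PSDM 5.x; SimBench carries no EM data
    DefaultLoadProfiles.NO_LOAD_PROFILE,
    false, // no demand side management
    eCons,
    sRated,
    cosphi
  )
```

The same `null` is threaded through PowerPlantConverter and ResConverter below, whose constructor calls gain the identical parameter.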
src/main/scala/edu/ie3/simbench/convert/NodePFResultConverter.scala
@@ -1,25 +1,22 @@
 package edu.ie3.simbench.convert

-import java.time.ZonedDateTime
-import java.util.UUID
-
 import edu.ie3.datamodel.models.input.NodeInput
 import edu.ie3.datamodel.models.result.NodeResult
 import edu.ie3.simbench.model.datamodel.NodePFResult
-import edu.ie3.util.quantities.PowerSystemUnits.{PU, DEGREE_GEOM}
+import edu.ie3.util.quantities.PowerSystemUnits.{DEGREE_GEOM, PU}
 import tech.units.indriya.quantity.Quantities

+import java.time.ZonedDateTime
+
 /** Converts [[NodePFResult]] to [[NodeResult]]
   */
 object NodePFResultConverter {
   def convert(
       input: NodePFResult,
       node: NodeInput,
-      uuid: UUID = UUID.randomUUID(),
       timeStamp: ZonedDateTime = ZonedDateTime.parse("1970-01-01T00:00:00Z")
   ): NodeResult = {
     new NodeResult(
-      uuid,
       timeStamp,
       node.getUuid,
       Quantities.getQuantity(input.vm.doubleValue, PU),
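Dropping the `uuid` parameter is consistent with PSDM 5.x result entities, which are no longer identified by their own UUID but by time stamp and input model. A minimal sketch of the slimmed-down construction (quantity values are illustrative):

```scala
import edu.ie3.datamodel.models.result.NodeResult
import edu.ie3.util.quantities.PowerSystemUnits.{DEGREE_GEOM, PU}
import tech.units.indriya.quantity.Quantities
import java.time.ZonedDateTime
import java.util.UUID

val nodeUuid = UUID.randomUUID() // stands in for node.getUuid

// PSDM >= 5.x: a NodeResult takes (time, inputModel, vMag, vAng) only.
val result = new NodeResult(
  ZonedDateTime.parse("1970-01-01T00:00:00Z"),
  nodeUuid,
  Quantities.getQuantity(1.02, PU), // voltage magnitude
  Quantities.getQuantity(0.0, DEGREE_GEOM) // voltage angle
)
```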
src/main/scala/edu/ie3/simbench/convert/PowerPlantConverter.scala
@@ -93,6 +93,7 @@ case object PowerPlantConverter extends ShuntConverter {
       OperationTime.notLimited(),
       node,
       new CosPhiFixed(varCharacteristicString),
+      null,
       sRated,
       cosphi
     ) -> (timeSeries, profile.id)
1 change: 1 addition & 0 deletions src/main/scala/edu/ie3/simbench/convert/ResConverter.scala
@@ -87,6 +87,7 @@ case object ResConverter extends ShuntConverter {
       OperationTime.notLimited(),
       node,
       new CosPhiFixed(varCharacteristicString),
+      null,
       sRated,
       cosphi
     ) -> (timeSeries, profile.id)
36 changes: 18 additions & 18 deletions src/main/scala/edu/ie3/simbench/main/RunSimbench.scala
@@ -1,6 +1,6 @@
 package edu.ie3.simbench.main

-import java.nio.file.Paths
+import java.nio.file.{Path, Paths}
 import edu.ie3.datamodel.io.naming.{
   DefaultDirectoryHierarchy,
   EntityPersistenceNamingStrategy,
@@ -96,25 +96,25 @@ object RunSimbench extends SimbenchHelper {
       IoUtils.ensureHarmonizedAndTerminatingFileSeparator(
         simbenchConfig.io.output.targetFolder
       )

-    val hierarchyAdjustedBaseDir = if (simbenchConfig.io.output.csv.directoryHierarchy) baseTargetDirectory else baseTargetDirectory + simbenchCode
-
-    val fileNamingStrategy =
-      if (simbenchConfig.io.output.csv.directoryHierarchy) {
+    val csvSink = if (simbenchConfig.io.output.csv.directoryHierarchy) {
+      new CsvFileSink(
+        Path.of(baseTargetDirectory),
         new FileNamingStrategy(
           new EntityPersistenceNamingStrategy(),
-          new DefaultDirectoryHierarchy(baseTargetDirectory, simbenchCode)
-        )
-      } else {
-        new FileNamingStrategy()
-      }
-
-    val csvSink = new CsvFileSink(
-      hierarchyAdjustedBaseDir,
-      fileNamingStrategy,
-      false,
-      simbenchConfig.io.output.csv.separator
-    )
+          new DefaultDirectoryHierarchy(
+            Path.of(baseTargetDirectory),
+            simbenchCode
+          )
+        ),
+        simbenchConfig.io.output.csv.separator
+      )
+    } else {
+      new CsvFileSink(
+        Path.of(baseTargetDirectory + simbenchCode),
+        new FileNamingStrategy(),
+        simbenchConfig.io.output.csv.separator
+      )
+    }

     csvSink.persistJointGrid(jointGridContainer)
     timeSeries.foreach(csvSink.persistTimeSeries(_))
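Both branches now construct the sink directly, since the PSDM 5.x `CsvFileSink` takes a `java.nio.file.Path` instead of a `String` and drops the former init-files boolean. A hedged sketch of the new wiring as a standalone helper (parameter names mirror the config fields in the diff; the constructor overloads are read off the code above, not the PSDM javadoc):

```scala
import java.nio.file.Path
import edu.ie3.datamodel.io.naming.{
  DefaultDirectoryHierarchy,
  EntityPersistenceNamingStrategy,
  FileNamingStrategy
}
import edu.ie3.datamodel.io.sink.CsvFileSink

// Sketch: pick a hierarchic layout (one directory tree per SimBench code)
// or a flat layout (everything under baseTargetDirectory + simbenchCode).
def buildSink(
    baseTargetDirectory: String,
    simbenchCode: String,
    separator: String,
    directoryHierarchy: Boolean
): CsvFileSink =
  if (directoryHierarchy)
    new CsvFileSink(
      Path.of(baseTargetDirectory),
      new FileNamingStrategy(
        new EntityPersistenceNamingStrategy(),
        new DefaultDirectoryHierarchy(Path.of(baseTargetDirectory), simbenchCode)
      ),
      separator
    )
  else
    new CsvFileSink(
      Path.of(baseTargetDirectory + simbenchCode),
      new FileNamingStrategy(),
      separator
    )
```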
src/main/scala/edu/ie3/simbench/model/datamodel/SimbenchModel.scala
@@ -1,12 +1,13 @@
 package edu.ie3.simbench.model.datamodel

-import java.time.ZoneId
-import java.util.Locale
-
 import edu.ie3.simbench.io.HeadLineField
 import edu.ie3.simbench.model.RawModelData
 import edu.ie3.util.TimeUtil

+import java.time.ZoneId
+import java.time.format.DateTimeFormatter
+import java.util.Locale
+
 /** Common fields, every SimBench model has
   */
 trait SimbenchModel {
@@ -26,8 +27,12 @@ object SimbenchModel {
     */
   protected val ID: String = "id"

-  protected val simbenchTimeUtil =
-    new TimeUtil(ZoneId.of("UTC"), Locale.GERMANY, "dd.MM.yyyy HH:mm")
+  protected val simbenchTimeUtil: TimeUtil = new TimeUtil(
+    DateTimeFormatter
+      .ofPattern("dd.MM.yyyy HH:mm")
+      .withZone(ZoneId.of("UTC"))
+      .withLocale(Locale.GERMANY)
+  )

   /** Get an Array of table fields denoting the mapping to the model's
     * attributes
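PowerSystemUtils 2.2.x replaces TimeUtil's (ZoneId, Locale, pattern) constructor with one that accepts a pre-built `DateTimeFormatter`, so the formatter is now assembled explicitly. A short sketch of parsing a SimBench time stamp with the new form (`toZonedDateTime` is an assumption about the TimeUtil API, shown only to illustrate usage):

```scala
import edu.ie3.util.TimeUtil
import java.time.ZoneId
import java.time.format.DateTimeFormatter
import java.util.Locale

// PSU >= 2.2: build the formatter first, then wrap it in TimeUtil.
val simbenchTimeUtil: TimeUtil = new TimeUtil(
  DateTimeFormatter
    .ofPattern("dd.MM.yyyy HH:mm")
    .withZone(ZoneId.of("UTC"))
    .withLocale(Locale.GERMANY)
)

// Assumed usage: "01.01.2016 00:00" is a typical SimBench profile time stamp.
val time = simbenchTimeUtil.toZonedDateTime("01.01.2016 00:00")
```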
src/main/scala/edu/ie3/simbench/model/datamodel/profiles/ResProfile.scala
@@ -6,7 +6,6 @@ import edu.ie3.simbench.io.HeadLineField
 import edu.ie3.simbench.io.HeadLineField.{MandatoryField, OptionalField}
 import edu.ie3.simbench.model.RawModelData
 import edu.ie3.simbench.model.datamodel.profiles.ProfileModel.ProfileCompanionObject
-import edu.ie3.util.TimeUtil

 /** A renewable energy source's profile consisting of an identifier and a
   * mapping of the date to (p,q) pair
src/test/scala/edu/ie3/simbench/convert/GridConverterSpec.scala
@@ -187,7 +187,7 @@ class GridConverterSpec extends UnitSpec with SwitchTestingData {
           .toVector
         /* There is no participant uuid in mapping, that is not among participants */
         timeSeriesMapping.exists(entry =>
-          !participantUuids.contains(entry.getParticipant)
+          !participantUuids.contains(entry.participant())
         ) shouldBe false

         /* Evaluate the amount of converted power flow results */
src/test/scala/edu/ie3/simbench/convert/NodePFResultConverterSpec.scala
@@ -25,21 +25,18 @@ class NodePFResultConverterSpec extends UnitSpec with ConverterTestData {

       val actual = NodePFResultConverter.convert(input, node)

-      /* UUID is not checked, as it is random */
       actual.getTime shouldBe expected.getTime
       actual.getInputModel shouldBe expected.getInputModel
       actual.getvMag shouldBe actual.getvMag()
       actual.getvAng shouldBe expected.getvAng()
     }

-    "convert a single result correctly with specified UUID" in {
-      val uuid = UUID.randomUUID()
+    "convert a single result correctly" in {
       val (input, expected) = getNodeResultPair("1")
       val (_, node) = getNodePair(input.node.id)

-      val actual = NodePFResultConverter.convert(input, node, uuid = uuid)
+      val actual = NodePFResultConverter.convert(input, node)

-      actual.getUuid shouldBe uuid
       actual.getTime shouldBe expected.getTime
       actual.getInputModel shouldBe expected.getInputModel
       actual.getvMag shouldBe actual.getvMag()
@@ -54,7 +51,6 @@
       val actual =
         NodePFResultConverter.convert(input, node, timeStamp = timeStamp)

-      /* UUID is not checked, as it is random */
       actual.getTime shouldBe timeStamp
       actual.getInputModel shouldBe expected.getInputModel
       actual.getvMag shouldBe actual.getvMag()