Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Scala 3 Migration #209

Open
wants to merge 3 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 6 additions & 2 deletions .github/workflows/pr-build.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,11 @@ jobs:
distribution: 'adopt'
- name: Run mill tests
run: ./mill __.test
- name: Compile with sbt
- name: Compile Scala 2.13 with sbt
run: sbt ++2.13 compile bench/compile bench/jmh:compile
- name: Compile Scala 3 with sbt
run: sbt compile bench/compile bench/jmh:compile
- name: Run sbt tests
- name: Run sbt tests (Scala 2.13)
run: sbt ++2.13 test
- name: Run sbt tests (Scala 3)
run: sbt test
5 changes: 3 additions & 2 deletions bench/src/main/scala/sjsonnet/MaterializerBenchmark.scala
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ import java.util.concurrent.TimeUnit
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra._
import ujson.JsVisitor
import scala.compiletime.uninitialized

@BenchmarkMode(Array(Mode.AverageTime))
@Fork(2)
Expand All @@ -16,8 +17,8 @@ import ujson.JsVisitor
@State(Scope.Benchmark)
class MaterializerBenchmark {

private var interp: Interpreter = _
private var value: Val = _
private var interp: Interpreter = uninitialized
private var value: Val = uninitialized

@Setup
def setup(): Unit = {
Expand Down
10 changes: 6 additions & 4 deletions bench/src/main/scala/sjsonnet/OptimizerBenchmark.scala
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra._

import scala.collection.mutable
import scala.compiletime.uninitialized

@BenchmarkMode(Array(Mode.AverageTime))
@Fork(2)
Expand All @@ -20,16 +21,17 @@ import scala.collection.mutable
@State(Scope.Benchmark)
class OptimizerBenchmark {

private var inputs: Iterable[(Expr, FileScope)] = _
private var allFiles: IndexedSeq[(Path, String)] = _
private var ev: EvalScope = _
private var inputs: Iterable[(Expr, FileScope)] = uninitialized
private var allFiles: IndexedSeq[(Path, String)] = uninitialized
private var ev: EvalScope = uninitialized

@Setup
def setup(): Unit = {
val (allFiles, ev) = MainBenchmark.findFiles()
this.inputs = allFiles.map { case (p, s) =>
fastparse.parse(s, new Parser(p, true, mutable.HashMap.empty, mutable.HashMap.empty).document(_)) match {
fastparse.parse(s, new Parser(p, true, mutable.HashMap.empty, mutable.HashMap.empty).document(using _)) match {
case Success(v, _) => v
case _ => throw new RuntimeException("Parse Failed")
}
}
this.ev = ev
Expand Down
9 changes: 5 additions & 4 deletions bench/src/main/scala/sjsonnet/ParserBenchmark.scala
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ import scala.collection.mutable.HashMap
import fastparse.Parsed.Success
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra._
import scala.compiletime.uninitialized

@BenchmarkMode(Array(Mode.AverageTime))
@Fork(2)
Expand All @@ -17,8 +18,8 @@ import org.openjdk.jmh.infra._
@State(Scope.Benchmark)
class ParserBenchmark {

private var allFiles: IndexedSeq[(Path, String)] = _
private var interp: Interpreter = _
private var allFiles: IndexedSeq[(Path, String)] = uninitialized
private var interp: Interpreter = uninitialized

@Setup
def setup(): Unit =
Expand All @@ -27,8 +28,8 @@ class ParserBenchmark {
@Benchmark
def main(bh: Blackhole): Unit = {
bh.consume(allFiles.foreach { case (p, s) =>
val res = fastparse.parse(s, new Parser(p, true, HashMap.empty, HashMap.empty).document(_))
bh.consume(res.asInstanceOf[Success[_]])
val res = fastparse.parse(s, new Parser(p, true, HashMap.empty, HashMap.empty).document(using _))
bh.consume(res.asInstanceOf[Success[?]])
})
}
}
60 changes: 47 additions & 13 deletions build.sbt
Original file line number Diff line number Diff line change
@@ -1,29 +1,55 @@
val sjsonnetVersion = "0.4.4"

scalaVersion in Global := "2.13.4"
val scala213 = "2.13.15"
val scala3 = "3.5.1"

val commonOptions: Seq[String] = Seq(
"-opt:l:inline",
"-opt-inline-from:sjsonnet.*,sjsonnet.**",
)

cancelable in Global := true
publish / skip := true

lazy val main = (project in file("sjsonnet"))
.settings(
Compile / scalacOptions ++= Seq("-opt:l:inline", "-opt-inline-from:sjsonnet.*,sjsonnet.**"),
name := "sjsonnet",

// Enable cross-compilation
scalaVersion := scala3,
crossScalaVersions := Seq(scala213, scala3),
scalacOptions ++= {
(CrossVersion.partialVersion(scalaVersion.value) match {
case Some((3, _)) =>
commonOptions ++ Seq(
// options dedicated for cross build / migration to Scala 3
"-source:3.5-migration"
)
case _ =>
commonOptions ++ Seq(
"-Xsource:3"
)
})
},


Test / fork := true,
Test / baseDirectory := (ThisBuild / baseDirectory).value,
libraryDependencies ++= Seq(
"com.lihaoyi" %% "fastparse" % "2.3.1",
"com.lihaoyi" %% "pprint" % "0.6.1",
"com.lihaoyi" %% "ujson" % "1.3.7",
"com.lihaoyi" %% "scalatags" % "0.9.3",
"com.lihaoyi" %% "os-lib" % "0.7.2",
"com.lihaoyi" %% "mainargs" % "0.2.0",
"com.lihaoyi" %% "fastparse" % "3.1.1",
"com.lihaoyi" %% "pprint" % "0.9.0",
"com.lihaoyi" %% "ujson" % "4.0.0",
"com.lihaoyi" %% "scalatags" % "0.12.0",
"com.lihaoyi" %% "os-lib" % "0.10.3",
"com.lihaoyi" %% "mainargs" % "0.7.5",
"org.lz4" % "lz4-java" % "1.8.0",
"org.json" % "json" % "20211205",
"org.scala-lang.modules" %% "scala-collection-compat" % "2.4.0",
"org.tukaani" % "xz" % "1.8",
"org.yaml" % "snakeyaml" % "1.30",
"org.json" % "json" % "20240303",
"org.scala-lang.modules" %% "scala-collection-compat" % "2.12.0",
"org.tukaani" % "xz" % "1.9",
"org.yaml" % "snakeyaml" % "2.0",
),
libraryDependencies ++= Seq(
"com.lihaoyi" %% "utest" % "0.7.7",
"com.lihaoyi" %% "utest" % "0.8.3",
).map(_ % "test"),
testFrameworks += new TestFramework("utest.runner.Framework"),
(Compile / unmanagedSourceDirectories) := Seq(
Expand Down Expand Up @@ -56,4 +82,12 @@ lazy val bench = (project in file("bench"))
.enablePlugins(JmhPlugin)
.settings(
run / fork := true,
// Do not cross-compile the benchmark
scalaVersion := scala3,
)

lazy val root = (project in file("."))
.aggregate(main)
.settings(
publishArtifact := false
)
3 changes: 1 addition & 2 deletions project/plugins.sbt
Original file line number Diff line number Diff line change
@@ -1,2 +1 @@
//addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.7")
addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.3")
addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.3")
4 changes: 2 additions & 2 deletions sjsonnet/src-jvm-native/sjsonnet/CachedResolvedFile.scala
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ class CachedResolvedFile(val resolvedImportPath: OsPath, memoryLimitBytes: Long,
// Assert that the file is less than limit
assert(jFile.length() <= memoryLimitBytes, s"Resolved import path ${resolvedImportPath} is too large: ${jFile.length()} bytes > ${memoryLimitBytes} bytes")

private[this] val resolvedImportContent: StaticResolvedFile = {
private val resolvedImportContent: StaticResolvedFile = {
if (jFile.length() > cacheThresholdBytes) {
// If the file is too large, then we will just read it from disk
null
Expand All @@ -35,7 +35,7 @@ class CachedResolvedFile(val resolvedImportPath: OsPath, memoryLimitBytes: Long,
}
}

private[this] def readString(jFile: File): String = {
private def readString(jFile: File): String = {
new String(Files.readAllBytes(jFile.toPath), StandardCharsets.UTF_8);
}

Expand Down
2 changes: 1 addition & 1 deletion sjsonnet/src-jvm-native/sjsonnet/OsPath.scala
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,6 @@ case class OsPath(p: os.Path) extends Path{
":" + Util.prettyIndex(lineStarts, offset)
}

p.relativeTo(os.pwd) + offsetStr
p.relativeTo(os.pwd).toString() + offsetStr
}
}
14 changes: 7 additions & 7 deletions sjsonnet/src-jvm-native/sjsonnet/SjsonnetMain.scala
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ object SjsonnetMain {
val doc = "usage: sjsonnet [sjsonnet-options] script-file"
val result = for{
config <- parser.constructEither(
args,
args.toIndexedSeq,
customName = name, customDoc = doc,
autoPrintHelpAndExit = None
)
Expand Down Expand Up @@ -113,7 +113,7 @@ object SjsonnetMain {
def writeFile(config: Config, f: os.Path, contents: String): Either[String, Unit] =
handleWriteFile(os.write.over(f, contents, createFolders = config.createDirs.value))

def writeToFile(config: Config, wd: os.Path)(materialize: Writer => Either[String, _]): Either[String, String] = {
def writeToFile(config: Config, wd: os.Path)(materialize: Writer => Either[String, ?]): Either[String, String] = {
config.outputFile match{
case None =>
val sw = new StringWriter
Expand Down Expand Up @@ -205,12 +205,12 @@ object SjsonnetMain {
importer = importer match{
case Some(i) => new Importer {
def resolve(docBase: Path, importName: String): Option[Path] =
i(docBase, importName).map(OsPath)
i(docBase, importName).map(OsPath.apply)
def read(path: Path): Option[ResolvedFile] = {
readPath(path)
}
}
case None => resolveImport(config.jpaths.map(os.Path(_, wd)).map(OsPath(_)), allowedInputs)
case None => resolveImport(config.jpaths.map(os.Path(_, wd)).map(OsPath.apply), allowedInputs)
},
parseCache,
settings = new Settings(
Expand Down Expand Up @@ -246,8 +246,8 @@ object SjsonnetMain {
Right(writer.toString)
}
}
relPath = os.FilePath(multiPath) / os.RelPath(f)
_ <- writeFile(config, relPath.resolveFrom(wd), rendered)
relPath = os.Path(multiPath, wd) / f
_ <- writeFile(config, relPath, rendered)
} yield relPath
}

Expand Down Expand Up @@ -299,7 +299,7 @@ object SjsonnetMain {
* of caching on top of the underlying file system. Small files are read into memory, while large
* files are read from disk.
*/
private[this] def readPath(path: Path): Option[ResolvedFile] = {
private def readPath(path: Path): Option[ResolvedFile] = {
val osPath = path.asInstanceOf[OsPath].p
if (os.exists(osPath) && os.isFile(osPath)) {
Some(new CachedResolvedFile(path.asInstanceOf[OsPath], memoryLimitBytes = Int.MaxValue.toLong))
Expand Down
16 changes: 9 additions & 7 deletions sjsonnet/src-jvm/sjsonnet/Platform.scala
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,13 @@ package sjsonnet

import org.json.JSONObject

import java.io.{ByteArrayOutputStream, BufferedInputStream, File, FileInputStream}
import java.io.{BufferedInputStream, ByteArrayOutputStream, File, FileInputStream}
import java.util.Base64
import java.util.zip.GZIPOutputStream
import net.jpountz.xxhash.{StreamingXXHash64, XXHashFactory, XXHash64}
import net.jpountz.xxhash.{StreamingXXHash64, XXHash64, XXHashFactory}
import org.tukaani.xz.LZMA2Options
import org.tukaani.xz.XZOutputStream
import org.yaml.snakeyaml.Yaml
import org.yaml.snakeyaml.{LoaderOptions, Yaml}
import org.yaml.snakeyaml.constructor.Constructor

object Platform {
Expand All @@ -23,12 +23,13 @@ object Platform {
outputStream.close()
}
}

def gzipString(s: String): String = {
gzipBytes(s.getBytes())
}

/**
* Valid compression levels are 0 (no compression) to 9 (maximum compression).
* Valid compression levels are 0 (no compression) to 9 (maximum compression).
*/
def xzBytes(b: Array[Byte], compressionLevel: Option[Int]): String = {
val outputStream: ByteArrayOutputStream = new ByteArrayOutputStream(b.length)
Expand All @@ -49,14 +50,15 @@ object Platform {
}

def yamlToJson(yamlString: String): String = {
val yaml: java.util.LinkedHashMap[String, Object] = new Yaml(new Constructor(classOf[java.util.LinkedHashMap[String, Object]])).load(yamlString)
val options = new LoaderOptions()
val yaml: java.util.LinkedHashMap[String, Object] = new Yaml(new Constructor(classOf[java.util.LinkedHashMap[String, Object]], options)).load(yamlString)
new JSONObject(yaml).toString()
}

private def computeHash(algorithm: String, s: String) = {
java.security.MessageDigest.getInstance(algorithm)
.digest(s.getBytes("UTF-8"))
.map{ b => String.format("%02x", (b & 0xff).asInstanceOf[Integer])}
.map { b => String.format("%02x", Integer.valueOf(b & 0xff)) }
.mkString
}

Expand All @@ -71,7 +73,7 @@ object Platform {
// Same as go-jsonnet https://github.com/google/go-jsonnet/blob/2b4d7535f540f128e38830492e509a550eb86d57/builtins.go#L959
def sha3(s: String): String = computeHash("SHA3-512", s)

private[this] val xxHashFactory = XXHashFactory.fastestInstance()
private val xxHashFactory = XXHashFactory.fastestInstance()

def hashFile(file: File): String = {
val buffer = new Array[Byte](8192)
Expand Down
Loading