Skip to content

Commit

Permalink
Merge pull request #247 from lolgab/update-libs
Browse files Browse the repository at this point in the history
Add Scala Native, update libraries, update scalafmt
  • Loading branch information
cquiroz authored Jul 16, 2024
2 parents 4627f58 + 124c199 commit f6c7f86
Show file tree
Hide file tree
Showing 34 changed files with 937 additions and 825 deletions.
6 changes: 3 additions & 3 deletions .github/workflows/scala.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,11 @@ jobs:
matrix:
include:
- java: 8
scala: 2.12.14
scala: 2.12.19
- java: 14
scala: 2.13.6
scala: 2.13.14
- java: 8
scala: 3.0.1
scala: 3.3.3

steps:

Expand Down
13 changes: 11 additions & 2 deletions .scalafmt.conf
Original file line number Diff line number Diff line change
@@ -1,5 +1,14 @@
version = 3.8.2
runner.dialect = scala213
style = default
align = more
align.preset = "more"
maxColumn = 125
rewrite.rules = [SortImports]
rewrite.rules = [ SortImports ]
project.git = true
docstrings.blankFirstLine = unfold

fileOverride {
"glob:**/src/*/scala-3/**" {
runner.dialect = scala3
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
package java.security

/** Dummy, insecure `SecureRandom` stand-in for tests.
  *
  * It simply extends [[java.util.Random]], so it is NOT cryptographically
  * secure and must never be used in production code. For real usage, use a
  * proper implementation such as
  * https://github.com/lolgab/scala-native-crypto for Scala Native and
  * https://github.com/scala-js/scala-js-java-securerandom for Scala.js.
  */
class SecureRandom extends java.util.Random
30 changes: 14 additions & 16 deletions boopickle/js/src/main/scala/boopickle/StringCodec.scala
Original file line number Diff line number Diff line change
Expand Up @@ -30,15 +30,13 @@ object StringCodec extends StringCodecBase {
private lazy val utf8decoder: (Int8Array) => String = {
val td = new TextDecoder
// use native TextDecoder
(data: Int8Array) =>
td.decode(data)
(data: Int8Array) => td.decode(data)
}

private lazy val utf8encoder: (String) => Int8Array = {
val te = new TextEncoder
// use native TextEncoder
(str: String) =>
new Int8Array(te.encode(str).buffer)
(str: String) => new Int8Array(te.encode(str).buffer)
}

private lazy val utf16decoder: (Uint16Array) => String = {
Expand Down Expand Up @@ -103,7 +101,7 @@ object StringCodec extends StringCodecBase {
val ta = new Uint16Array(buf.typedArray().buffer, buf.position() + buf.typedArray().byteOffset, len / 2)
buf.position(buf.position() + len)
utf16decoder(ta)
//new String(ta.toArray) // alt implementation
// new String(ta.toArray) // alt implementation
} else {
val a = new Array[Byte](len)
buf.get(a)
Expand Down Expand Up @@ -149,12 +147,12 @@ object StringCodec extends StringCodecBase {
buf(dst) = c.toByte
dst += 1
} else if (c < 0x4000) {
buf(dst) = (0x80 | (c & 0x3F)).toByte
buf(dst + 1) = (c >> 6 & 0xFF).toByte
buf(dst) = (0x80 | (c & 0x3f)).toByte
buf(dst + 1) = (c >> 6 & 0xff).toByte
dst += 2
} else {
buf(dst) = (0xC0 | (c & 0x3F)).toByte
buf(dst + 1) = (c >> 6 & 0xFF).toByte
buf(dst) = (0xc0 | (c & 0x3f)).toByte
buf(dst + 1) = (c >> 6 & 0xff).toByte
buf(dst + 2) = (c >> 14).toByte
dst += 3
}
Expand All @@ -176,19 +174,19 @@ object StringCodec extends StringCodecBase {
var offset = buf.position()
var dst = 0
while (dst < len) {
val b = src(offset) & 0xFF
val b = src(offset) & 0xff
offset += 1
if ((b & 0x80) == 0) {
cp(dst) = b
} else if ((b & 0xC0) == 0x80) {
val b1 = src(offset) & 0xFF
} else if ((b & 0xc0) == 0x80) {
val b1 = src(offset) & 0xff
offset += 1
cp(dst) = b & 0x3F | b1 << 6
cp(dst) = b & 0x3f | b1 << 6
} else {
val b1 = src(offset) & 0xFF
val b2 = src(offset + 1) & 0xFF
val b1 = src(offset) & 0xff
val b2 = src(offset + 1) & 0xff
offset += 2
cp(dst) = b & 0x3F | b1 << 6 | b2 << 14
cp(dst) = b & 0x3f | b1 << 6 | b2 << 14
}
dst += 1
}
Expand Down
4 changes: 2 additions & 2 deletions boopickle/js/src/test/scala/boopickle/StringCodecTests.scala
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@ object StringCodecTests extends TestSuite {
val bufs = sizes.map(createBB)

val strings = bufs.map(b => Unpickle[String].fromBytes(b))
sizes.zip(strings).foreach {
case (size, str) => assert(str.length == size)
sizes.zip(strings).foreach { case (size, str) =>
assert(str.length == size)
}
}
}
Expand Down
22 changes: 14 additions & 8 deletions boopickle/shared/src/main/scala-2.12/boopickle/XCompat.scala
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,9 @@ import scala.language.higherKinds
trait XCompatImplicitPicklers {
this: PicklerHelper =>

implicit def mapPickler[T: P, S: P, V[_, _] <: scala.collection.Map[_, _]](
implicit cbf: CanBuildFrom[Nothing, (T, S), V[T, S]]): P[V[T, S]] =
implicit def mapPickler[T: P, S: P, V[_, _] <: scala.collection.Map[_, _]](implicit
cbf: CanBuildFrom[Nothing, (T, S), V[T, S]]
): P[V[T, S]] =
BasicPicklers.MapPickler[T, S, V]
implicit def iterablePickler[T: P, V[_] <: Iterable[_]](implicit cbf: CanBuildFrom[Nothing, T, V[T]]): P[V[T]] =
BasicPicklers.IterablePickler[T, V]
Expand All @@ -22,8 +23,10 @@ trait XCompatPicklers {
/**
* This pickler works on all collections that derive from Iterable[T] (Vector, Set, List, etc)
*
* @tparam T type of the values
* @tparam V type of the collection
* @tparam T
* type of the values
* @tparam V
* type of the collection
* @return
*/
def IterablePickler[T: P, V[_] <: Iterable[_]](implicit cbf: CanBuildFrom[Nothing, T, V[T]]): P[V[T]] = new P[V[T]] {
Expand Down Expand Up @@ -63,12 +66,15 @@ trait XCompatPicklers {
/**
* Maps require a specific pickler as they have two type parameters.
*
* @tparam T Type of keys
* @tparam S Type of values
* @tparam T
* Type of keys
* @tparam S
* Type of values
* @return
*/
def MapPickler[T: P, S: P, V[_, _] <: scala.collection.Map[_, _]](
implicit cbf: CanBuildFrom[Nothing, (T, S), V[T, S]]): P[V[T, S]] =
def MapPickler[T: P, S: P, V[_, _] <: scala.collection.Map[_, _]](implicit
cbf: CanBuildFrom[Nothing, (T, S), V[T, S]]
): P[V[T, S]] =
new P[V[T, S]] {
override def pickle(map: V[T, S])(implicit state: PickleState): Unit = {
if (map == null) {
Expand Down
11 changes: 7 additions & 4 deletions boopickle/shared/src/main/scala-2.13+/boopickle/XCompat.scala
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,9 @@ import scala.collection.immutable.SeqMap
trait XCompatImplicitPicklers1 {
this: PicklerHelper =>

implicit def mapPickler[K: P, V: P, M[_, _] <: scala.collection.Map[_, _]](
implicit f: Factory[(K, V), M[K, V]]): P[M[K, V]] = BasicPicklers.MapPickler[K, V, M]
implicit def mapPickler[K: P, V: P, M[_, _] <: scala.collection.Map[_, _]](implicit
f: Factory[(K, V), M[K, V]]
): P[M[K, V]] = BasicPicklers.MapPickler[K, V, M]

implicit def iterablePickler[A: P, F[_] <: Iterable[_]](implicit cbf: Factory[A, F[A]]): P[F[A]] =
BasicPicklers.IterablePickler[A, F]
Expand All @@ -28,8 +29,10 @@ trait XCompatPicklers {
/**
* This pickler works on all collections that derive from Iterable[A] (Vector, Set, List, etc)
*
* @tparam A type of the values
* @tparam F type of the collection
* @tparam A
* type of the values
* @tparam F
* type of the collection
* @return
*/
def IterablePickler[A: P, F[_] <: Iterable[_]](implicit cbf: Factory[A, F[A]]): P[F[A]] = new P[F[A]] {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -122,7 +122,8 @@ object PicklerMaterializersImpl {
if (!sym.isCaseClass) {
c.error(
c.enclosingPosition,
s"Cannot materialize pickler for non-case class: $tpe. If this is a collection, the error can refer to the class inside.")
s"Cannot materialize pickler for non-case class: $tpe. If this is a collection, the error can refer to the class inside."
)
return c.Expr[Pickler[T]](q"null")
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ object PicklerMaterializersImpl {

Expr.summon[Mirror.Of[A]] match {
case Some('{ $p: Mirror.ProductOf[A] }) => deriveProduct[A](p)
case Some('{ $s: Mirror.SumOf[A] }) => deriveSum[A](s)
case Some('{ $s: Mirror.SumOf[A] }) => deriveSum[A](s)
case _ => deriveOther[A]
}
}
Expand All @@ -26,27 +26,24 @@ object PicklerMaterializersImpl {

m match {

case '{ $x: Mirror.ProductOf[A] { type MirroredElemTypes = EmptyTuple }} =>
case '{ $x: Mirror.ProductOf[A] { type MirroredElemTypes = EmptyTuple } } =>
val a = '{ $x.fromProduct(EmptyTuple) }
'{ ConstPickler($a) }

case '{ $x: Mirror.ProductOf[A] { type MirroredElemTypes = t *: EmptyTuple }} =>
case '{ $x: Mirror.ProductOf[A] { type MirroredElemTypes = t *: EmptyTuple } } =>
val pickler = exprSummonLater[Pickler[t]]
'{ $pickler.xmap[A](
v => $x.fromProduct(Tuple1(v)))(
a => a.asInstanceOf[Product].productElement(0).asInstanceOf[t])
}
'{ $pickler.xmap[A](v => $x.fromProduct(Tuple1(v)))(a => a.asInstanceOf[Product].productElement(0).asInstanceOf[t]) }

case '{ type p <: AnyRef; type t <: Tuple; $x: Mirror.ProductOf[`p`] { type MirroredElemTypes = `t` }} =>
lazy val picklerExprs = summonAllPicklers[t]
lazy val picklers = Expr.ofList(picklerExprs)
case '{ type p <: AnyRef; type t <: Tuple; $x: Mirror.ProductOf[`p`] { type MirroredElemTypes = `t` } } =>
lazy val picklerExprs = summonAllPicklers[t]
lazy val picklers = Expr.ofList(picklerExprs)
val result: Expr[Pickler[p]] = '{ deriveAnyRefProduct[p]($x, $picklers) }
result.asInstanceOf[Expr[Pickler[A]]]
}
}

def deriveAnyRefProduct[A <: AnyRef](m: Mirror.ProductOf[A], _picklers: => List[Pickler[_]]): Pickler[A] = {
lazy val picklers = _picklers.asInstanceOf[List[Pickler[Any]]]
lazy val picklers = _picklers.asInstanceOf[List[Pickler[Any]]]
lazy val picklerCount = picklers.size
new Pickler[A] {
override def pickle(value: A)(implicit state: PickleState): Unit = {
Expand Down Expand Up @@ -88,19 +85,19 @@ object PicklerMaterializersImpl {

def fields[T <: Tuple](using Type[T]): List[Expr[(Pickler[_], ClassTag[_])]] = {
Type.of[T] match {
case '[ h *: tail ] =>
val p = exprSummonLater[Pickler [h]]
case '[h *: tail] =>
val p = exprSummonLater[Pickler[h]]
val c = exprSummonLater[ClassTag[h]]
'{ ($p, $c) } :: fields[tail]
case '[ EmptyTuple ] =>
case '[EmptyTuple] =>
Nil
}
}

m match {
case '{ type t <: Tuple; $x: Mirror.SumOf[A] { type MirroredElemTypes = `t` }} =>
case '{ type t <: Tuple; $x: Mirror.SumOf[A] { type MirroredElemTypes = `t` } } =>
val fs = fields[t]
val e = '{ sumTypeHack[A](${ Expr.ofList(fs) }) }
val e = '{ sumTypeHack[A](${ Expr.ofList(fs) }) }
inlineExpr(e)
}
}
Expand All @@ -124,16 +121,16 @@ object PicklerMaterializersImpl {
(sym.caseFields, sym.companionModule.memberMethod("apply")) match {
case (field :: Nil, apply :: Nil) =>
TypeRepr.of[A].memberType(field).asType match {
case '[ t ] =>
case '[t] =>
lazy val pickler = exprSummonLater[Pickler[t]]
apply.tree match {
case DefDef(_, (p :: Nil) :: Nil, _, _) =>
def build(e: Expr[t]): Expr[A] = Apply(Ref(apply), e.asTerm :: Nil).asExprOf[A]
def build(e: Expr[t]): Expr[A] = Apply(Ref(apply), e.asTerm :: Nil).asExprOf[A]
def access(e: Expr[A]): Expr[t] = Select(e.asTerm, field).asExprOf[t]
return '{ $pickler.xmap[A](b => ${build('b)})(a => ${access('a)}) }
return '{ $pickler.xmap[A](b => ${ build('b) })(a => ${ access('a) }) }
case _ =>
}
}
}
case _ =>
}
case _ =>
Expand All @@ -155,8 +152,8 @@ object PicklerMaterializersImpl {
def summonAllPicklers[A <: Tuple](using Quotes, Type[A]): List[Expr[Pickler[_]]] = {
import quotes.reflect._
Type.of[A] match {
case '[ EmptyTuple ] => Nil
case '[ a *: as ] => exprSummonLater[Pickler[a]] :: summonAllPicklers[as]
case '[EmptyTuple] => Nil
case '[a *: as] => exprSummonLater[Pickler[a]] :: summonAllPicklers[as]
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,11 @@ import java.nio.{ByteBuffer, ByteOrder}
trait BufferProvider {

/**
* Makes sure the ByteBuffer has enough space for new data. If not, allocates a new ByteBuffer
* and returns it. The returned ByteBuffer must have little-endian ordering.
* Makes sure the ByteBuffer has enough space for new data. If not, allocates a new ByteBuffer and returns it. The
* returned ByteBuffer must have little-endian ordering.
*
* @param size Number of bytes needed for new data
* @param size
* Number of bytes needed for new data
* @return
*/
def alloc(size: Int): ByteBuffer
Expand Down
Loading

0 comments on commit f6c7f86

Please sign in to comment.