Skip to content

Commit

Permalink
Merge pull request #287 from DmitryPodpryatov/char-reader-writer
Browse files Browse the repository at this point in the history
Add reader and writer for Char
  • Loading branch information
dos65 committed May 15, 2024
2 parents 8ca2e86 + b2a0437 commit 56baa08
Show file tree
Hide file tree
Showing 6 changed files with 30 additions and 2 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,12 @@ trait AllJsonReaders extends OptionReaders {
}
}

// Reads a JSON string token as a single Char. Strings whose length is not
// exactly 1 are rejected as malformed input via ReaderError.
implicit lazy val charReader: JsonReader[Char] = stringReader.mapWithField { implicit fieldName => s =>
  if (s.length == 1) s.head
  else ReaderError.wrongJson(s"Expected char value but found: $s")
}

implicit lazy val numberReader: JsonReader[Number] = new JsonReader[Number] {
override def read(it: TokenIterator)(implicit fieldName: FieldName): Number = {
if(it.currentToken().isNumberValue) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,10 @@ trait AllJsonWriters extends OptionWriters with EitherWriters {
override def write(value: String, tokenWriter: TokenWriter): Unit = tokenWriter.writeString(value)
}

// Writes a Char as a one-character JSON string (symmetric with charReader).
implicit lazy val charWriter: JsonWriter[Char] = new JsonWriter[Char] {
  // String.valueOf(Char) yields the same one-character string as value.toString.
  override def write(value: Char, tokenWriter: TokenWriter): Unit =
    tokenWriter.writeString(String.valueOf(value))
}

implicit lazy val javaIntWriter: JsonWriter[java.lang.Integer] = new JsonWriter[java.lang.Integer] {
override def write(value: java.lang.Integer, tokenWriter: TokenWriter): Unit = tokenWriter.writeNumber(value)
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ class DefaultReadersTest extends AnyFlatSpec {

private val cases: List[(TestDefinition[_], List[TokenNode])] = List[(TestDefinition[_], List[TokenNode])](
test("1") -> value("1"),
test('1') -> value("1"),
test(1) -> value(1),
test(1: Short) -> value(1: Short),
test(1L) -> value(1L),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,7 @@ class JsonReaderBuilderTest extends AnyFlatSpec with Matchers {
.addField[Boolean]("c")
.addField[Seq[String]]("d")
.addField[Double]("e")
.addField[Char]("f")
.addField[Option[Int]]("opt")
.buildReader(FatClass.apply)
}
Expand All @@ -72,13 +73,15 @@ class JsonReaderBuilderTest extends AnyFlatSpec with Matchers {
"b" -> "s",
"c" -> true,
"d" -> arr("a", "b", "c"),
"e" -> 4
"e" -> 4,
"f" -> "c"
)) shouldBe FatClass(
a = 1,
b = "s",
c = true,
d = Seq("a", "b", "c"),
e = 4.0D,
f = 'c',
opt = None
)
}
Expand Down Expand Up @@ -165,5 +168,6 @@ object JsonReaderBuilderTest {
c: Boolean,
d: Seq[String],
e: Double,
f: Char,
opt: Option[Int])
}
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ class DefaultWritersTest extends AnyFlatSpec {

private val cases: List[(TestDefinition[_], List[TokenNode])] = List[(TestDefinition[_], List[TokenNode])](
test("1") -> value("1"),
test('1') -> value("1"),
test(1) -> value(1),
test(1: Short) -> value(1: Short),
test(1L) -> value(1L),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ import org.scalatest.matchers.should.Matchers.{value => _, _}
import org.scalatest.flatspec.AnyFlatSpec
import tethys.{JsonObjectWriter, JsonWriter}
import tethys.commons.TokenNode._
import tethys.writers.SimpleJsonObjectWriterTest.TestData
import tethys.writers.SimpleJsonObjectWriterTest.{CharData, TestData}
import tethys.writers.instances.SimpleJsonObjectWriter
import tethys.writers.tokens.SimpleTokenWriter._

Expand Down Expand Up @@ -39,10 +39,22 @@ class SimpleJsonObjectWriterTest extends AnyFlatSpec {
"c" -> false
)
}

it should "write correct object with char field" in {
  // Object writer serialising CharData's single Char field under key "c".
  implicit val charDataWriter: SimpleJsonObjectWriter[CharData] =
    JsonWriter.obj[CharData].addField("c")(_.c)

  CharData('c').asTokenList shouldBe obj("c" -> "c")
}
}

object SimpleJsonObjectWriterTest {
  // Fixture data types used by the writer tests in the companion class.
  case class TestData(a: Int, b: String)
  case class CharData(c: Char)
}

0 comments on commit 56baa08

Please sign in to comment.