* Initial commit of wip from https://github.com/zio/zio-web/pull/90. #7

  Co-authored-by: Brandon Brown <[email protected]>
  Co-authored-by: Jason Pickens <[email protected]>

* WIP trying to get build working
* wip
* Fixes #7: Implement JSON Codec
* Formatting
* More unit tests
* unit tests
* Add non-streaming methods to JsonCodec
* More test fixes
* Transform failure tests
* Update to published version of zio-json
* Scala 2.12 support
* Work around jdk 8 duration parsing bug

Co-authored-by: Brandon Brown <[email protected]>
Co-authored-by: thinkharder <[email protected]>
1 parent be72f2e, commit e58a401. Showing 7 changed files with 1,149 additions and 0 deletions.
@@ -0,0 +1,235 @@
package zio.schema.codec

import java.nio.CharBuffer
import java.nio.charset.StandardCharsets

import zio.json.JsonCodec._
import zio.json.JsonDecoder.{ JsonError, UnsafeJson }
import zio.json.internal.{ Lexer, RetractReader, Write }
import zio.json.{ JsonCodec => ZJsonCodec, JsonDecoder, JsonEncoder, JsonFieldDecoder, JsonFieldEncoder }
import zio.schema.{ StandardType, _ }
import zio.stream.ZTransducer
import zio.{ Chunk, ChunkBuilder, ZIO }

object JsonCodec extends Codec {

  override def encoder[A](schema: Schema[A]): ZTransducer[Any, Nothing, A, Byte] =
    ZTransducer.fromPush(
      (opt: Option[Chunk[A]]) =>
        ZIO.succeed(opt.map(values => values.flatMap(Encoder.encode(schema, _))).getOrElse(Chunk.empty))
    )

  override def decoder[A](schema: Schema[A]): ZTransducer[Any, String, Byte, A] = schema match {
    case Schema.Primitive(StandardType.UnitType) =>
      ZTransducer.fromPush(_ => ZIO.succeed(Chunk.unit))
    case _ =>
      ZTransducer.utfDecode >>> ZTransducer.fromFunctionM(
        (s: String) => ZIO.fromEither(Decoder.decode(schema, s))
      )
  }

  override def encode[A](schema: Schema[A]): A => Chunk[Byte] = Encoder.encode(schema, _)

  override def decode[A](schema: Schema[A]): Chunk[Byte] => Either[String, A] =
    (chunk: Chunk[Byte]) => Decoder.decode(schema, new String(chunk.toArray, Encoder.CHARSET))

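  // Maps each StandardType to a ready-made zio-json codec; Unit is
  // special-cased to encode to nothing and decode without reading input.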
  object Codecs {
    protected[codec] val unitEncoder: JsonEncoder[Unit] = new JsonEncoder[Unit] {
      override def unsafeEncode(a: Unit, indent: Option[Int], out: Write): Unit = ()
      override def isNothing(a: Unit): Boolean                                  = true
    }

    protected[codec] val unitDecoder: JsonDecoder[Unit] =
      (_: List[JsonDecoder.JsonError], _: RetractReader) => ()

    protected[codec] val unitCodec: ZJsonCodec[Unit] = ZJsonCodec(unitEncoder, unitDecoder)

    protected[codec] def failDecoder[A](message: String): JsonDecoder[A] =
      (trace: List[JsonDecoder.JsonError], _: RetractReader) => throw UnsafeJson(JsonError.Message(message) :: trace)

    private[codec] def primitiveCodec[A](standardType: StandardType[A]): ZJsonCodec[A] =
      standardType match {
        case StandardType.UnitType          => unitCodec
        case StandardType.StringType        => ZJsonCodec.string
        case StandardType.BoolType          => ZJsonCodec.boolean
        case StandardType.ShortType         => ZJsonCodec.short
        case StandardType.IntType           => ZJsonCodec.int
        case StandardType.LongType          => ZJsonCodec.long
        case StandardType.FloatType         => ZJsonCodec.float
        case StandardType.DoubleType        => ZJsonCodec.double
        case StandardType.BinaryType        => ZJsonCodec.chunk(ZJsonCodec.byte)
        case StandardType.CharType          => ZJsonCodec.char
        case StandardType.DayOfWeekType     => ZJsonCodec.dayOfWeek      // ZJsonCodec[java.time.DayOfWeek]
        case StandardType.Duration(_)       => ZJsonCodec.duration       // ZJsonCodec[java.time.Duration]
        case StandardType.Instant(_)        => ZJsonCodec.instant        // ZJsonCodec[java.time.Instant]
        case StandardType.LocalDate(_)      => ZJsonCodec.localDate      // ZJsonCodec[java.time.LocalDate]
        case StandardType.LocalDateTime(_)  => ZJsonCodec.localDateTime  // ZJsonCodec[java.time.LocalDateTime]
        case StandardType.LocalTime(_)      => ZJsonCodec.localTime      // ZJsonCodec[java.time.LocalTime]
        case StandardType.Month             => ZJsonCodec.month          // ZJsonCodec[java.time.Month]
        case StandardType.MonthDay          => ZJsonCodec.monthDay       // ZJsonCodec[java.time.MonthDay]
        case StandardType.OffsetDateTime(_) => ZJsonCodec.offsetDateTime // ZJsonCodec[java.time.OffsetDateTime]
        case StandardType.OffsetTime(_)     => ZJsonCodec.offsetTime     // ZJsonCodec[java.time.OffsetTime]
        case StandardType.Period            => ZJsonCodec.period         // ZJsonCodec[java.time.Period]
        case StandardType.Year              => ZJsonCodec.year           // ZJsonCodec[java.time.Year]
        case StandardType.YearMonth         => ZJsonCodec.yearMonth      // ZJsonCodec[java.time.YearMonth]
        case StandardType.ZonedDateTime(_)  => ZJsonCodec.zonedDateTime  // ZJsonCodec[java.time.ZonedDateTime]
        case StandardType.ZoneId            => ZJsonCodec.zoneId         // ZJsonCodec[java.time.ZoneId]
        case StandardType.ZoneOffset        => ZJsonCodec.zoneOffset     // ZJsonCodec[java.time.ZoneOffset]
      }
  }

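  // Builds a zio-json JsonEncoder from a Schema at encode time, then renders
  // the value as a UTF-8 byte Chunk.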
  object Encoder {
    import Codecs._
    import JsonEncoder.{ bump, pad }

    private[codec] val CHARSET = StandardCharsets.UTF_8

    final def encode[A](schema: Schema[A], value: A): Chunk[Byte] =
      charSequenceToByteChunk(schemaEncoder(schema, value).encodeJson(value, None))

    private[codec] def charSequenceToByteChunk(chars: CharSequence): Chunk[Byte] = {
      val bytes = CHARSET.newEncoder().encode(CharBuffer.wrap(chars))
      Chunk.fromByteBuffer(bytes)
    }

    private def schemaEncoder[A](schema: Schema[A], value: A): JsonEncoder[A] = schema match {
      case Schema.Primitive(standardType) => primitiveCodec(standardType)
      case Schema.Sequence(schema)        => JsonEncoder.chunk(schemaEncoder(schema, value))
      case Schema.Transform(c, _, g)      => transformEncoder(c, value, g)
      case s @ Schema.Tuple(_, _)         => tupleEncoder(s, value)
      case s @ Schema.Optional(_)         => optionEncoder(s, value)
      case Schema.Fail(_)                 => unitEncoder.contramap(_ => ())
      case Schema.Record(structure)       => recordEncoder(structure, value)
      case Schema.Enumeration(structure)  => enumEncoder(structure, value)
    }

    private def tupleEncoder[A, B](schema: Schema.Tuple[A, B], value: (A, B)): JsonEncoder[(A, B)] =
      schemaEncoder(schema.left, value._1).both(schemaEncoder(schema.right, value._2))

    private def optionEncoder[A](schema: Schema.Optional[A], value: Option[A]): JsonEncoder[Option[A]] = value match {
      case Some(v) => JsonEncoder.option(schemaEncoder(schema.codec, v))
      case None =>
        (_: Option[A], _: Option[Int], out: Write) => out.write("null")
    }

    private def transformEncoder[A, B](schema: Schema[A], value: B, g: B => Either[String, A]): JsonEncoder[B] = {
      (_: B, indent: Option[Int], out: Write) =>
        g(value) match {
          case Left(_)  => ()
          case Right(a) => schemaEncoder(schema, a).unsafeEncode(a, indent, out)
        }
    }

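    // Encodes a record value as a JSON object, writing each field declared in
    // `structure` with the value looked up from the record's Map.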
    private def recordEncoder(structure: Map[String, Schema[_]], value: Map[String, _]): JsonEncoder[Map[String, _]] = {
      (_: Map[String, _], indent: Option[Int], out: Write) =>
        {
          if (structure.isEmpty) {
            out.write("{}")
          } else {
            out.write('{')
            val indent_ = bump(indent)
            pad(indent_, out)
            var first = true
            structure.foreach {
              case (k, a) =>
                val enc = schemaEncoder(a.asInstanceOf[Schema[Any]], value(k))
                if (first)
                  first = false
                else {
                  out.write(',')
                  if (indent.isDefined)
                    JsonEncoder.pad(indent_, out)
                }

                string.unsafeEncode(JsonFieldEncoder.string.unsafeEncodeField(k), indent_, out)
                if (indent.isEmpty) out.write(':')
                else out.write(" : ")
                enc.unsafeEncode(value(k), indent_, out)
            }
            pad(indent, out)
            out.write('}')
          }
        }
    }

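    // Encodes an enumeration as a single-field JSON object: only the first
    // entry of the value map is written.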
    private def enumEncoder(structure: Map[String, Schema[_]], value: Map[String, _]): JsonEncoder[Map[String, _]] = {
      (a: Map[String, _], indent: Option[Int], out: Write) =>
        {
          if (structure.isEmpty) {
            out.write("{}")
          } else {
            out.write('{')
            val indent_ = bump(indent)
            pad(indent_, out)
            var first  = true
            val (k, v) = a.toSeq.head
            val enc    = schemaEncoder(structure(k).asInstanceOf[Schema[Any]], value(k))
            if (first)
              first = false
            else {
              out.write(',')
              if (indent.isDefined)
                pad(indent_, out)
            }

            string.unsafeEncode(JsonFieldEncoder.string.unsafeEncodeField(k), indent_, out)
            if (indent.isEmpty) out.write(':')
            else out.write(" : ")
            enc.unsafeEncode(v, indent_, out)
            pad(indent, out)
            out.write('}')
          }
        }
    }
  }

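  // Rebuilds a zio-json JsonDecoder from a Schema and decodes a JSON string.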
  object Decoder {
    import Codecs._

    final def decode[A](schema: Schema[A], json: String): Either[String, A] =
      schemaDecoder(schema).decodeJson(json)

    private def schemaDecoder[A](schema: Schema[A]): JsonDecoder[A] = schema match {
      case Schema.Primitive(standardType) => primitiveCodec(standardType)
      case Schema.Optional(codec)         => JsonDecoder.option(schemaDecoder(codec))
      case Schema.Tuple(left, right)      => JsonDecoder.tuple2(schemaDecoder(left), schemaDecoder(right))
      case Schema.Transform(codec, f, _)  => schemaDecoder(codec).mapOrFail(f)
      case Schema.Sequence(codec)         => JsonDecoder.chunk(schemaDecoder(codec))
      case Schema.Fail(message)           => failDecoder(message)
      case Schema.Record(structure)       => recordDecoder(structure)
      case Schema.Enumeration(structure)  => enumDecoder(structure)
    }

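    // Decodes a JSON object into a Map keyed by field name, using the schema
    // registered for each field in `structure`.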
    private def recordDecoder(structure: Map[String, Schema[_]]): JsonDecoder[Map[String, Any]] = {
      (trace: List[JsonError], in: RetractReader) =>
        {
          val builder: ChunkBuilder[(String, Any)] = zio.ChunkBuilder.make[(String, Any)](structure.size)
          Lexer.char(trace, in, '{')
          if (Lexer.firstField(trace, in))
            do {
              val field  = Lexer.string(trace, in).toString
              val trace_ = JsonError.ObjectAccess(field) :: trace
              Lexer.char(trace_, in, ':')
              val value = schemaDecoder(structure(field)).unsafeDecode(trace_, in)
              builder += ((JsonFieldDecoder.string.unsafeDecodeField(trace_, field), value))
            } while (Lexer.nextField(trace, in))
          builder.result().toMap
        }
    }

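    // Decodes at most one field of a JSON object into a single-entry Map.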
    private def enumDecoder(structure: Map[String, Schema[_]]): JsonDecoder[Map[String, Any]] = {
      (trace: List[JsonError], in: RetractReader) =>
        {
          val builder: ChunkBuilder[(String, Any)] = zio.ChunkBuilder.make[(String, Any)](structure.size)
          Lexer.char(trace, in, '{')
          if (Lexer.firstField(trace, in)) {
            val field  = Lexer.string(trace, in).toString
            val trace_ = JsonError.ObjectAccess(field) :: trace
            Lexer.char(trace_, in, ':')
            val value = schemaDecoder(structure(field)).unsafeDecode(trace_, in)
            builder += ((JsonFieldDecoder.string.unsafeDecodeField(trace_, field), value))
          }
          builder.result().toMap
        }
    }
  }
}
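To make the new codec concrete, here is a minimal round-trip sketch against the API above. It assumes `Schema.Record` describes its value as a `Map[String, _]`, as the record encoder and decoder suggest; the object, schema, and value names are illustrative, not part of this commit.

import zio.schema.{ Schema, StandardType }
import zio.schema.codec.JsonCodec

object RoundTripExample {
  // Hypothetical record schema: two primitive fields keyed by name.
  val person: Schema[Map[String, _]] = Schema.Record(
    Map(
      "name" -> Schema.Primitive(StandardType.StringType),
      "age"  -> Schema.Primitive(StandardType.IntType)
    )
  )

  // encode returns A => Chunk[Byte]; decode returns Chunk[Byte] => Either[String, A].
  val bytes   = JsonCodec.encode(person)(Map("name" -> "Ann", "age" -> 42))
  val decoded = JsonCodec.decode(person)(bytes) // expected: Right(Map("name" -> "Ann", "age" -> 42))
}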
@@ -0,0 +1,137 @@
package zio.schema

import java.time._
import java.time.temporal.ChronoField

import zio.random.Random
import zio.test.Gen

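// Generators for java.time values, for use in zio-test properties.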
object JavaTimeGen {

  val anyDayOfWeek: Gen[Random, DayOfWeek] = Gen.oneOf(
    Gen.const(DayOfWeek.MONDAY),
    Gen.const(DayOfWeek.TUESDAY),
    Gen.const(DayOfWeek.WEDNESDAY),
    Gen.const(DayOfWeek.THURSDAY),
    Gen.const(DayOfWeek.FRIDAY),
    Gen.const(DayOfWeek.SATURDAY),
    Gen.const(DayOfWeek.SUNDAY)
  )

  val anyMonth: Gen[Random, Month] = Gen.oneOf(
    Gen.const(Month.JANUARY),
    Gen.const(Month.FEBRUARY),
    Gen.const(Month.MARCH),
    Gen.const(Month.APRIL),
    Gen.const(Month.MAY),
    Gen.const(Month.JUNE),
    Gen.const(Month.JULY),
    Gen.const(Month.AUGUST),
    Gen.const(Month.SEPTEMBER),
    Gen.const(Month.OCTOBER),
    Gen.const(Month.NOVEMBER),
    Gen.const(Month.DECEMBER)
  )

  val anyNanoOfDay: Gen[Random, Long] = chronoFieldValue(ChronoField.NANO_OF_DAY)

  val anyEpochDay: Gen[Random, Long] = chronoFieldValue(ChronoField.EPOCH_DAY)

  val anyMonthOfYear: Gen[Random, Int] = chronoFieldValue(ChronoField.MONTH_OF_YEAR).map(_.toInt)

  val anyMonthDay: Gen[Random, MonthDay] =
    for {
      month      <- anyMonth
      dayOfMonth <- Gen.int(1, month.maxLength)
    } yield MonthDay.of(month, dayOfMonth)

  // Needs to be an ISO-8601 year, between 0000 and 9999.
  val anyIntYear: Gen[Random, Int] = Gen.int(0, 9999)

  val anyYear: Gen[Random, Year] = anyIntYear.map(Year.of)

  val anyYearMonth: Gen[Random, YearMonth] =
    anyIntYear.zipWith(anyMonthOfYear) { (year, month) =>
      YearMonth.of(year, month)
    }

  private def chronoFieldValue(chronoField: ChronoField) = {
    val range = chronoField.range
    Gen.long(range.getMinimum, range.getMaximum)
  }

  // FIXME: There is a bug in JDK 8 Duration parsing that caused issues in
  // zio-json (https://github.com/zio/zio-json/issues/214). Do not generate
  // Durations with negative seconds; once that is addressed, the filter
  // condition can be removed.
  val anyDuration: Gen[Random, Duration] = Gen.anyLong
    .zipWith(Gen.long(0, 999999999L)) { (seconds, nanos) =>
      Duration.ofSeconds(seconds, nanos)
    }
    .filter(_.getSeconds > 0)

  val anyPeriod: Gen[Random, Period] =
    for {
      years  <- Gen.anyInt
      months <- Gen.anyInt
      days   <- Gen.anyInt
    } yield Period.of(years, months, days)

  val anyInstant: Gen[Random, Instant] = Gen
    .long(Instant.MIN.getEpochSecond, Instant.MAX.getEpochSecond)
    .zipWith(Gen.int(Instant.MIN.getNano, Instant.MAX.getNano)) { (seconds, nanos) =>
      Instant.ofEpochSecond(seconds, nanos.toLong)
    }

  val anyLocalDate: Gen[Random, LocalDate] = anyEpochDay.map(LocalDate.ofEpochDay)

  val anyLocalTime: Gen[Random, LocalTime] = anyNanoOfDay.map(LocalTime.ofNanoOfDay)

  val anyLocalDateTime: Gen[Random, LocalDateTime] = anyLocalDate.zipWith(anyLocalTime) { (date, time) =>
    LocalDateTime.of(date, time)
  }

  val anyZoneOffset: Gen[Random, ZoneOffset] =
    Gen.int(ZoneOffset.MIN.getTotalSeconds, ZoneOffset.MAX.getTotalSeconds).map(ZoneOffset.ofTotalSeconds)

  // This uses ZoneRulesProvider, which has an effectful static initializer.
  // private val regionZoneIds =
  //   ZIO.succeed(ZoneId.getAvailableZoneIds.asScala.toSet.map(ZoneId.of))
  //
  // private val zoneOffsets =
  //   (ZoneOffset.MIN.getTotalSeconds to ZoneOffset.MAX.getTotalSeconds).map(ZoneOffset.ofTotalSeconds)

  // private val zoneIds = regionZoneIds.map(_.toList ++ zoneOffsets)

  // FIXME: Shuffle is really slow.
  // private val zoneIds =
  //   for {
  //     ids      <- regionZoneIds
  //     all       = ids ++ zoneOffsets
  //     random   <- ZIO.service[Random.Service]
  //     shuffled <- random.shuffle(all.toList)
  //   } yield shuffled

  // FIXME: Sampling causes some sort of pathological performance issue.
  val anyZoneId: Gen[Random, ZoneId] = Gen.const(ZoneId.systemDefault())
  // Gen(ZStream.fromIterableM(zoneIds).map {
  //   case offset: ZoneOffset => Sample.noShrink(offset)
  //   // FIXME: This is really slow even when it isn't shrinking.
  //   //Sample.shrinkIntegral(ZoneOffset.UTC.getTotalSeconds)(offset.getTotalSeconds).map { seconds =>
  //   //  ZoneOffset.ofTotalSeconds(seconds)
  //   //}
  //   case zone => Sample.noShrink(zone)
  // })

  // TODO: This needs to be double-checked; I have encountered problems generating these in the past.
  // See https://github.com/BotTech/scala-hedgehog-spines/blob/master/core/src/main/scala/com/lightbend/hedgehog/generators/time/TimeGenerators.scala
  val anyZonedDateTime: Gen[Random, ZonedDateTime] = anyLocalDateTime.zipWith(anyZoneId) { (dateTime, zone) =>
    ZonedDateTime.of(dateTime, zone)
  }

  val anyOffsetTime: Gen[Random, OffsetTime] = anyLocalTime.zipWith(anyZoneOffset) { (time, offset) =>
    OffsetTime.of(time, offset)
  }

  val anyOffsetDateTime: Gen[Random, OffsetDateTime] = anyLocalDateTime.zipWith(anyZoneOffset) { (dateTime, offset) =>
    OffsetDateTime.of(dateTime, offset)
  }
}
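As a sketch of how these generators might drive the codec's unit tests, the property below round-trips a generated LocalDate through the JSON codec. It assumes zio-test 1.x (`DefaultRunnableSpec`, `testM`, `check`) and that the `StandardType.LocalDate` constructor takes a `DateTimeFormatter`, which the wildcard patterns in the codec leave unspecified; the spec name is illustrative.

import java.time.format.DateTimeFormatter

import zio.schema.{ JavaTimeGen, Schema, StandardType }
import zio.schema.codec.JsonCodec
import zio.test.Assertion._
import zio.test._

object LocalDateRoundTripSpec extends DefaultRunnableSpec {
  // Assumed constructor: StandardType.LocalDate(formatter).
  private val schema = Schema.Primitive(StandardType.LocalDate(DateTimeFormatter.ISO_LOCAL_DATE))

  def spec = suite("JsonCodec round trips")(
    testM("LocalDate encodes and decodes to the same value") {
      check(JavaTimeGen.anyLocalDate) { date =>
        val bytes = JsonCodec.encode(schema)(date)
        assert(JsonCodec.decode(schema)(bytes))(isRight(equalTo(date)))
      }
    }
  )
}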