Support for ZIO2-RC3 (#230)
* Upgrade to ZIO2 RC2

* Update to ZIO2 RC3

* Upgrade zio-json snapshot version for ZIO2-RC3

* Fix string encoding compilation error

* Try to get timeouts sorted
thinkharderdev authored Mar 29, 2022
1 parent 259b36d commit 54ad98f
Showing 5 changed files with 19 additions and 19 deletions.
8 changes: 4 additions & 4 deletions project/BuildHelper.scala
@@ -26,10 +26,10 @@ object BuildHelper {
val Scala213: String = versions("2.13")
val ScalaDotty: String = "3.1.0" //versions.getOrElse("3.0", versions("3.1"))

-val zioVersion = "2.0.0-RC1"
-val zioJsonVersion = "0.3.0-RC1-1"
-val zioPreludeVersion = "1.0.0-RC9"
-val zioOpticsVersion = "0.2.0-RC1"
+val zioVersion = "2.0.0-RC3"
+val zioJsonVersion = "0.3.0-RC4"
+val zioPreludeVersion = "1.0.0-RC10"
+val zioOpticsVersion = "0.2.0-RC2"
val silencerVersion = "1.7.8"

private val testDeps = Seq(
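The bump above moves the build from the RC1-era dependency set to ZIO 2.0.0-RC3, zio-json 0.3.0-RC4, zio-prelude 1.0.0-RC10 and zio-optics 0.2.0-RC2. The settings that consume these vals live elsewhere in BuildHelper.scala and are not part of this diff; the sketch below only illustrates, with a hypothetical helper, how such version vals are typically threaded into sbt dependencies.

import sbt._

// Hypothetical helper (not from BuildHelper.scala): the bumped versions feed
// ordinary sbt module IDs for the dev.zio artifacts.
def zioDependencies(
  zioVersion: String,        // "2.0.0-RC3"
  zioJsonVersion: String,    // "0.3.0-RC4"
  zioPreludeVersion: String, // "1.0.0-RC10"
  zioOpticsVersion: String   // "0.2.0-RC2"
): Seq[ModuleID] =
  Seq(
    "dev.zio" %% "zio"         % zioVersion,
    "dev.zio" %% "zio-json"    % zioJsonVersion,
    "dev.zio" %% "zio-prelude" % zioPreludeVersion,
    "dev.zio" %% "zio-optics"  % zioOpticsVersion
  )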
8 changes: 4 additions & 4 deletions tests/shared/src/test/scala/zio/schema/SchemaGen.scala
@@ -14,7 +14,7 @@ object SchemaGen {
def anyStructure(
schemaGen: Gen[Random with Sized, Schema[_]]
): Gen[Random with Sized, Seq[Schema.Field[_]]] =
-Gen.setOfBounded(1, 8)(anyLabel).flatMap { keySet =>
+Gen.setOfBounded(1, 3)(anyLabel).flatMap { keySet =>
Gen.setOfN(keySet.size)(schemaGen).map { schemas =>
keySet
.zip(schemas)
@@ -27,7 +27,7 @@ object SchemaGen {

def anyStructure[A](schema: Schema[A]): Gen[Random with Sized, Seq[Schema.Field[A]]] =
Gen
-.setOfBounded(1, 8)(
+.setOfBounded(1, 3)(
anyLabel.map(Schema.Field(_, schema))
)
.map(_.toSeq)
@@ -36,13 +36,13 @@ object SchemaGen {
schemaGen: Gen[Random with Sized, Schema[_]]
): Gen[Random with Sized, ListMap[String, Schema[_]]] =
Gen
-.setOfBounded(1, 8)(
+.setOfBounded(1, 3)(
anyLabel.zip(schemaGen)
)
.map(ListMap.empty ++ _)

def anyEnumeration[A](schema: Schema[A]): Gen[Random with Sized, ListMap[String, Schema[A]]] =
-Gen.setOfBounded(1, 8)(anyLabel.map(_ -> schema)).map(ListMap.empty ++ _)
+Gen.setOfBounded(1, 3)(anyLabel.map(_ -> schema)).map(ListMap.empty ++ _)

val anyPrimitive: Gen[Random, Schema.Primitive[_]] =
StandardTypeGen.anyStandardType.map(Schema.Primitive(_, Chunk.empty))
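The SchemaGen changes above all shrink the setOfBounded upper bound from 8 to 3, so a generated record, enumeration or structure carries at most three labelled fields or cases instead of eight. Since every field drags in its own recursively generated schema, the smaller bound cuts the size of each test case sharply, which is what the "Try to get timeouts sorted" commit bullet is addressing. A stand-alone sketch of the same generator call, with an illustrative label generator standing in for SchemaGen's anyLabel:

import zio.Random
import zio.test.{ Gen, Sized }

// Produces between 1 and 3 distinct labels per sample; under the old bound of 8
// a single generated schema could fan out into up to eight nested schemas.
val smallLabelSet: Gen[Random with Sized, Set[String]] =
  Gen.setOfBounded(1, 3)(Gen.alphaNumericString)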
@@ -58,7 +58,7 @@ object JsonCodec extends Codec {
case StandardType.LongType => ZJsonCodec.long
case StandardType.FloatType => ZJsonCodec.float
case StandardType.DoubleType => ZJsonCodec.double
-case StandardType.BinaryType => ZJsonCodec.chunk(ZJsonCodec.byte)
+case StandardType.BinaryType => ZJsonCodec.chunk(ZJsonCodec.byte.encoder, ZJsonCodec.byte.decoder)
case StandardType.CharType => ZJsonCodec.char
case StandardType.BigIntegerType => ZJsonCodec.bigInteger
case StandardType.BigDecimalType => ZJsonCodec.bigDecimal
@@ -100,10 +100,10 @@ object JsonCodec extends Codec {

//scalafmt: { maxColumn = 400, optIn.configStyleArguments = false }
private[codec] def schemaEncoder[A](schema: Schema[A]): JsonEncoder[A] = schema match {
-case Schema.Primitive(standardType, _) => primitiveCodec(standardType)
+case Schema.Primitive(standardType, _) => primitiveCodec(standardType).encoder
case Schema.Sequence(schema, _, g, _, _) => JsonEncoder.chunk(schemaEncoder(schema)).contramap(g)
case Schema.MapSchema(ks, vs, _) =>
-JsonEncoder.chunk(schemaEncoder(ks).both(schemaEncoder(vs))).contramap(m => Chunk.fromIterable(m))
+JsonEncoder.chunk(schemaEncoder(ks).zip(schemaEncoder(vs))).contramap(m => Chunk.fromIterable(m))
case Schema.SetSchema(s, _) =>
JsonEncoder.chunk(schemaEncoder(s)).contramap(m => Chunk.fromIterable(m))
case Schema.Transform(c, _, g, _, _) => transformEncoder(c, g)
@@ -217,13 +217,13 @@ object JsonCodec extends Codec {
val indent_ = bump(indent)
pad(indent_, out)
// schema
-string.unsafeEncode(JsonFieldEncoder.string.unsafeEncodeField("schema"), indent_, out)
+string.encoder.unsafeEncode(JsonFieldEncoder.string.unsafeEncodeField("schema"), indent_, out)
if (indent.isEmpty) out.write(':')
else out.write(" : ")
astEncoder.unsafeEncode(schema, indent_, out)
out.write(',')
// value
-string.unsafeEncode(JsonFieldEncoder.string.unsafeEncodeField("value"), indent_, out)
+string.encoder.unsafeEncode(JsonFieldEncoder.string.unsafeEncodeField("value"), indent_, out)
if (indent.isEmpty) out.write(':')
else out.write(" : ")
schemaEncoder(schema).unsafeEncode(value._1, indent_, out)
@@ -248,7 +248,7 @@ object JsonCodec extends Codec {
out.write('{')
val indent_ = bump(indent)
pad(indent_, out)
-string.unsafeEncode(JsonFieldEncoder.string.unsafeEncodeField(case_.id), indent_, out)
+string.encoder.unsafeEncode(JsonFieldEncoder.string.unsafeEncodeField(case_.id), indent_, out)
if (indent.isEmpty) out.write(':')
else out.write(" : ")
schemaEncoder(case_.codec.asInstanceOf[Schema[Any]]).unsafeEncode(case_.unsafeDeconstruct(value), indent, out)
Expand Down Expand Up @@ -278,7 +278,7 @@ object JsonCodec extends Codec {
if (indent.isDefined)
JsonEncoder.pad(indent_, out)
}
-string.unsafeEncode(JsonFieldEncoder.string.unsafeEncodeField(k), indent_, out)
+string.encoder.unsafeEncode(JsonFieldEncoder.string.unsafeEncodeField(k), indent_, out)
if (indent.isEmpty) out.write(':')
else out.write(" : ")
enc.unsafeEncode(value(k), indent_, out)
@@ -300,7 +300,7 @@ object JsonCodec extends Codec {

//scalafmt: { maxColumn = 400, optIn.configStyleArguments = false }
private[codec] def schemaDecoder[A](schema: Schema[A]): JsonDecoder[A] = schema match {
-case Schema.Primitive(standardType, _) => primitiveCodec(standardType)
+case Schema.Primitive(standardType, _) => primitiveCodec(standardType).decoder
case Schema.Optional(codec, _) => JsonDecoder.option(schemaDecoder(codec))
case Schema.Tuple(left, right, _) => JsonDecoder.tuple2(schemaDecoder(left), schemaDecoder(right))
case Schema.Transform(codec, f, _, _, _) => schemaDecoder(codec).mapOrFail(f)
@@ -510,7 +510,7 @@ object JsonCodec extends Codec {
JsonEncoder.pad(indent_, out)
}

-string.unsafeEncode(JsonFieldEncoder.string.unsafeEncodeField(key), indent_, out)
+string.encoder.unsafeEncode(JsonFieldEncoder.string.unsafeEncodeField(key), indent_, out)
if (indent.isEmpty) out.write(':')
else out.write(" : ")
enc.unsafeEncode(ext(a), indent_, out)
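The JsonCodec hunks above all apply one migration pattern for zio-json 0.3.0-RC4, where a JsonCodec[A] bundles an encoder and a decoder rather than being usable as either directly: call sites now pick the side they need with .encoder / .decoder (presumably the "Fix string encoding compilation error" bullet), chunk codecs are assembled from an explicit encoder/decoder pair, and pair encoding moves from both to zip. A minimal stand-alone sketch of the same pattern, assuming ZJsonCodec in the diff is an alias for zio.json.JsonCodec:

import zio.Chunk
import zio.json.{ JsonCodec, JsonDecoder, JsonEncoder }

// Select the two halves of a codec explicitly, as the diff now does for
// primitiveCodec(...) and for the aliased string codec.
val longCodec: JsonCodec[Long]     = JsonCodec.long
val longEncoder: JsonEncoder[Long] = longCodec.encoder
val longDecoder: JsonDecoder[Long] = longCodec.decoder

// Chunk codecs take the encoder and decoder separately (mirrors the BinaryType case).
val byteChunkCodec: JsonCodec[Chunk[Byte]] =
  JsonCodec.chunk(JsonCodec.byte.encoder, JsonCodec.byte.decoder)

// Pair encoding switches from `both` to `zip` (mirrors the MapSchema case).
val pairEncoder: JsonEncoder[(String, Long)] =
  JsonEncoder.string.zip(JsonEncoder.long)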
@@ -758,7 +758,7 @@ object JsonCodecSpec extends DefaultRunnableSpec {
implicit keyEncoder: JsonEncoder[K],
valueEncoder: JsonEncoder[V]
): JsonEncoder[Map[K, V]] =
-JsonEncoder.chunk(keyEncoder.both(valueEncoder)).contramap[Map[K, V]](m => Chunk.fromIterable(m))
+JsonEncoder.chunk(keyEncoder.zip(valueEncoder)).contramap[Map[K, V]](m => Chunk.fromIterable(m))

private def jsonEncoded[A](value: A)(implicit enc: JsonEncoder[A]): Chunk[Byte] =
JsonCodec.Encoder.charSequenceToByteChunk(enc.encodeJson(value, None))
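The spec's Map encoder helper above gets the same both-to-zip treatment: the key and value encoders are zipped into a pair encoder and the map is written as a chunk of pairs. Illustrative usage only; the hypothetical helper below restates the spec's encoder, and the comment about the rendered JSON shape is an assumption about how zio-json prints tuples and chunks, not something this diff asserts.

import zio.Chunk
import zio.json.JsonEncoder

def mapPairEncoder[K, V](implicit k: JsonEncoder[K], v: JsonEncoder[V]): JsonEncoder[Map[K, V]] =
  JsonEncoder.chunk(k.zip(v)).contramap[Map[K, V]](m => Chunk.fromIterable(m))

// Presumably renders the map as an array of [key, value] arrays, e.g. [["a",1]].
val rendered: CharSequence = mapPairEncoder[String, Int].encodeJson(Map("a" -> 1), None)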
@@ -829,7 +829,7 @@ object ThriftCodecSpec extends DefaultRunnableSpec {
)
)

-def writeManually(f: TBinaryProtocol => Unit): Task[String] = Task {
+def writeManually(f: TBinaryProtocol => Unit): Task[String] = Task.attempt {
val writeRecord = new ChunkTransport.Write()
f(new TBinaryProtocol(writeRecord))
toHex(writeRecord.chunk)
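The Thrift spec change above is the ZIO 2 effect-constructor rename: Task { ... } becomes Task.attempt { ... }, which suspends a side-effecting block and captures anything it throws in the error channel (ZIO.attempt is the general form). A tiny sketch with a hypothetical effect in place of the Thrift write:

import zio.{ Task, ZIO }

// A NumberFormatException thrown by toInt ends up as a failed Task
// rather than escaping as an exception.
def parseInt(s: String): Task[Int] =
  Task.attempt(s.toInt)

// Equivalent spelling via the ZIO companion object.
def parseIntZIO(s: String): Task[Int] =
  ZIO.attempt(s.toInt)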
