diff --git a/pom.xml b/pom.xml
index 05a68dc..4123913 100644
--- a/pom.xml
+++ b/pom.xml
@@ -3,7 +3,7 @@
<groupId>io.kcache</groupId>
<artifactId>kwack</artifactId>
- <version>0.6.0</version>
+ <version>0.7.0</version>
<packaging>jar</packaging>

<name>kwack</name>
diff --git a/src/main/java/io/kcache/kwack/KwackEngine.java b/src/main/java/io/kcache/kwack/KwackEngine.java
index 562c389..044ef9a 100644
--- a/src/main/java/io/kcache/kwack/KwackEngine.java
+++ b/src/main/java/io/kcache/kwack/KwackEngine.java
@@ -22,6 +22,7 @@
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
import io.confluent.kafka.serializers.KafkaAvroDeserializerConfig;
+import io.confluent.kafka.serializers.KafkaJsonDeserializerConfig;
import io.kcache.CacheUpdateHandler;
import io.kcache.KafkaCache;
import io.kcache.KafkaCacheConfig;
@@ -438,7 +439,7 @@ private Tuple2<Context, Object> deserialize(boolean isKey, String topic, byte[]
Tuple2<Serde, ParsedSchema> schema =
isKey ? getKeySchema(serde, topic) : getValueSchema(serde, topic);
- Deserializer<?> deserializer = getDeserializer(schema);
+ Deserializer<?> deserializer = getDeserializer(isKey, schema);
if (serde.usesExternalSchema() || config.getSkipBytes() > 0) {
try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
@@ -492,11 +493,11 @@ private Tuple2<Context, Object> deserialize(boolean isKey, String topic, byte[]
return Tuple.of(ctx, object);
}
- public Deserializer<?> getDeserializer(Tuple2<Serde, ParsedSchema> schema) {
- return deserializers.computeIfAbsent(schema, this::createDeserializer);
+ public Deserializer<?> getDeserializer(boolean isKey, Tuple2<Serde, ParsedSchema> schema) {
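+ // Note: the cache key is still the schema tuple alone; isKey only affects
+ // how the deserializer is configured when it is first created.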
+ return deserializers.computeIfAbsent(schema, k -> createDeserializer(isKey, schema));
}
- private Deserializer<?> createDeserializer(Tuple2<Serde, ParsedSchema> schema) {
+ private Deserializer<?> createDeserializer(boolean isKey, Tuple2<Serde, ParsedSchema> schema) {
if (schema._2 != null) {
ParsedSchema parsedSchema = schema._2;
SchemaRegistryClient schemaRegistry = null;
@@ -517,18 +518,28 @@ private Deserializer<?> createDeserializer(Tuple2<Serde, ParsedSchema> schema) {
originals.put(AbstractKafkaSchemaSerDeConfig.USE_SCHEMA_ID, schema._1.getId());
break;
}
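+ // The (schemaRegistry, originals) constructors cannot convey whether this
+ // deserializer is for keys or values, so construct first and configure below.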
+ Deserializer<?> deserializer = null;
switch (parsedSchema.schemaType()) {
case "AVRO":
// This allows BigDecimal to be passed through unchanged
originals.put(KafkaAvroDeserializerConfig.AVRO_USE_LOGICAL_TYPE_CONVERTERS_CONFIG, true);
- return new KafkaAvroDeserializer(schemaRegistry, originals);
+ deserializer = new KafkaAvroDeserializer(schemaRegistry);
+ break;
case "JSON":
- return new KafkaJsonSchemaDeserializer<>(schemaRegistry, originals);
+ // Set the type to null so JsonNode is produced
+ // Otherwise the type defaults to Object.class which produces a LinkedHashMap
+ originals.put(KafkaJsonDeserializerConfig.JSON_KEY_TYPE, null);
+ originals.put(KafkaJsonDeserializerConfig.JSON_VALUE_TYPE, null);
+ deserializer = new KafkaJsonSchemaDeserializer<>(schemaRegistry);
+ break;
case "PROTOBUF":
- return new KafkaProtobufDeserializer<>(schemaRegistry, originals);
+ deserializer = new KafkaProtobufDeserializer<>(schemaRegistry);
+ break;
default:
throw new IllegalArgumentException("Illegal type " + parsedSchema.schemaType());
}
+ deserializer.configure(originals, isKey);
+ return deserializer;
} else {
switch (schema._1.getSerdeType()) {
case STRING:
diff --git a/src/test/java/io/kcache/kwack/AbstractSchemaTest.java b/src/test/java/io/kcache/kwack/AbstractSchemaTest.java
index 38a80b1..38c4fda 100644
--- a/src/test/java/io/kcache/kwack/AbstractSchemaTest.java
+++ b/src/test/java/io/kcache/kwack/AbstractSchemaTest.java
@@ -16,14 +16,17 @@ protected Properties createProducerProps(String schemaRegistryUrl) {
Properties props = new Properties();
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList);
props.put(SCHEMA_REGISTRY_URL, schemaRegistryUrl);
- props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
- org.apache.kafka.common.serialization.BytesSerializer.class);
+ props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, getKeySerializer());
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, getValueSerializer());
return props;
}
protected abstract String getTopic();
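+ // Defaults to Bytes, matching the previously hardcoded key serializer;
+ // schema-specific tests can override this to exercise other key types.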
+ protected Class<?> getKeySerializer() {
+ return org.apache.kafka.common.serialization.BytesSerializer.class;
+ }
+
protected abstract Class<?> getValueSerializer();
protected KafkaProducer createProducer(Properties props) {
@@ -31,10 +34,20 @@ protected KafkaProducer createProducer(Properties props) {
}
protected void produce(KafkaProducer producer, String topic, Object[] objects) {
- ProducerRecord record;
- for (Object object : objects) {
- byte[] bytes = ByteBuffer.allocate(4).putInt(object.hashCode()).array();
- record = new ProducerRecord<>(topic, Bytes.wrap(bytes), object);
+ produce(producer, topic, null, objects);
+ }
+
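+ // Overload that lets tests supply explicit keys alongside values; the
+ // single-array variant above delegates with null keys.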
+ protected void produce(KafkaProducer producer, String topic, Object[] keys, Object[] values) {
+ ProducerRecord