Test 4.16.1-SNAPSHOT driver #17

Draft · wants to merge 1 commit into base: master
@@ -53,23 +53,17 @@
import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata;
import com.datastax.oss.driver.api.core.metadata.schema.KeyspaceMetadata;
import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata;
import com.datastax.oss.driver.api.core.type.CqlVectorType;
import com.datastax.oss.driver.api.core.type.DataType;
import com.datastax.oss.driver.api.core.type.DataTypes;
import com.datastax.oss.driver.api.core.type.codec.TypeCodec;
import com.datastax.oss.driver.api.core.type.reflect.GenericType;
import com.datastax.oss.driver.internal.core.auth.PlainTextAuthProvider;
import com.datastax.oss.driver.internal.core.config.typesafe.DefaultDriverConfigLoader;
import com.datastax.oss.driver.internal.core.config.typesafe.DefaultProgrammaticDriverConfigLoaderBuilder;
import com.datastax.oss.driver.internal.core.type.codec.CqlVectorCodec;
import com.datastax.oss.driver.internal.core.type.codec.registry.DefaultCodecRegistry;
import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting;
import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap;
import com.datastax.oss.dsbulk.codecs.api.ConvertingCodecFactory;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
import java.net.InetSocketAddress;
import java.nio.file.Path;
import java.util.Collection;
@@ -97,20 +91,7 @@ public class LifeCycleManager
new ConcurrentHashMap<>();
private static MetricRegistry metricRegistry = new MetricRegistry();
private static final DefaultCodecRegistry CODEC_REGISTRY =
new DefaultCodecRegistry("default-registry") {

protected TypeCodec<?> createCodec(
@Nullable DataType cqlType,
@Nullable GenericType<?> javaType,
boolean isJavaCovariant) {
if (cqlType instanceof CqlVectorType) {
log.info("Automatically Registering codec for CqlVectorType {}", cqlType);
CqlVectorType vectorType = (CqlVectorType) cqlType;
return new CqlVectorCodec<>(vectorType, codecFor(vectorType.getSubtype()));
}
return super.createCodec(cqlType, javaType, isJavaCovariant);
}
};
new DefaultCodecRegistry("default-registry");

/** This is a utility class that no one should instantiate. */
private LifeCycleManager() {}
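Note on this hunk: the custom createCodec override for CqlVectorType is dropped and the plain DefaultCodecRegistry is used, presumably because the 4.16.1 driver can resolve vector codecs on its own. If an explicit codec were ever needed, it could still be built by hand with the new API names used later in this PR; a minimal sketch (element type FLOAT and dimension 5 are illustrative, not taken from this change):

import com.datastax.oss.driver.api.core.type.DataTypes;
import com.datastax.oss.driver.api.core.type.codec.TypeCodec;
import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
import com.datastax.oss.driver.internal.core.type.codec.VectorCodec;

class VectorCodecSketch {
  // Sketch only: DataTypes.vectorOf(elementType, dimensions) replaces the old
  // CqlVectorType constructor, and VectorCodec replaces CqlVectorCodec.
  static TypeCodec<?> floatVectorCodec() {
    return new VectorCodec(DataTypes.vectorOf(DataTypes.FLOAT, 5), TypeCodecs.FLOAT);
  }
}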
pom.xml (2 changes: 1 addition & 1 deletion)
@@ -41,7 +41,7 @@
<java.release.version>8</java.release.version>
<kafka.connect.version>2.4.0</kafka.connect.version>
<caffeine.version>2.6.2</caffeine.version>
<oss.driver.version>4.16.0</oss.driver.version>
<oss.driver.version>4.16.1-SNAPSHOT</oss.driver.version>
<dsbulk.version>1.10.0</dsbulk.version>
<reactive-streams.version>1.0.3</reactive-streams.version>
<guava.version>25.1-jre</guava.version>
text/pom.xml (4 changes: 2 additions & 2 deletions)
@@ -22,7 +22,7 @@
<artifactId>dsbulk-codecs</artifactId>
<groupId>com.datastax.oss</groupId>
<version>1.10.0</version>
<relativePath />
<relativePath/>
</parent>
<groupId>com.datastax.oss</groupId>
<version>1.0.16-SNAPSHOT</version>
@@ -44,7 +44,7 @@
<dependency>
<groupId>com.datastax.oss</groupId>
<artifactId>java-driver-core</artifactId>
<version>4.16.0</version>
<version>4.16.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.datastax.oss</groupId>
@@ -64,7 +64,6 @@
import com.datastax.oss.driver.api.core.CqlIdentifier;
import com.datastax.oss.driver.api.core.data.TupleValue;
import com.datastax.oss.driver.api.core.data.UdtValue;
import com.datastax.oss.driver.api.core.type.CqlVectorType;
import com.datastax.oss.driver.api.core.type.CustomType;
import com.datastax.oss.driver.api.core.type.DataType;
import com.datastax.oss.driver.api.core.type.DataTypes;
@@ -73,12 +72,13 @@
import com.datastax.oss.driver.api.core.type.SetType;
import com.datastax.oss.driver.api.core.type.TupleType;
import com.datastax.oss.driver.api.core.type.UserDefinedType;
import com.datastax.oss.driver.api.core.type.VectorType;
import com.datastax.oss.driver.api.core.type.codec.CodecNotFoundException;
import com.datastax.oss.driver.api.core.type.codec.TypeCodec;
import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry;
import com.datastax.oss.driver.api.core.type.reflect.GenericType;
import com.datastax.oss.driver.internal.core.type.codec.CqlVectorCodec;
import com.datastax.oss.driver.internal.core.type.codec.VectorCodec;
import com.datastax.oss.driver.internal.core.type.codec.registry.DefaultCodecRegistry;
import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList;
import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap;
@@ -117,6 +117,8 @@ public class JsonNodeConvertingCodecProvider implements ConvertingCodecProvider
private static final String DATE_RANGE_CLASS_NAME =
"org.apache.cassandra.db.marshal.DateRangeType";

private static final String VECTOR_CLASS_NAME = "org.apache.cassandra.db.marshal.VectorType";

@NonNull
@Override
public Optional<ConvertingCodec<?, ?>> maybeProvide(
@@ -393,22 +395,25 @@ public class JsonNodeConvertingCodecProvider implements ConvertingCodecProvider
nullStrings);
case DATE_RANGE_CLASS_NAME:
return new JsonNodeToDateRangeCodec(nullStrings);
case CqlVectorType.CQLVECTOR_CLASS_NAME:
CqlVectorType cqlVectorType = (CqlVectorType) cqlType;
case VECTOR_CLASS_NAME:
VectorType cqlVectorType = (VectorType) cqlType;
// Step 1: create a JSON codec which will take the input JSON nodes and generate
// something matching the expected data type
ConvertingCodec<JsonNode, ?> jsonCodec =
createJsonNodeConvertingCodec(cqlVectorType.getSubtype(), codecFactory, false);
createJsonNodeConvertingCodec(
cqlVectorType.getElementType(), codecFactory, false);
// Step 2: create a conventional codec which will take instances of the Java type
// generated by the JSON codec above and perform standard serde on them.
ConvertingCodec<?, ?> standardCodec =
codecFactory.createConvertingCodec(
cqlVectorType.getSubtype(), jsonCodec.getInternalJavaType(), false);
cqlVectorType.getElementType(), jsonCodec.getInternalJavaType(), false);
return new JsonNodeToVectorCodec(
new CqlVectorCodec(cqlVectorType, standardCodec),
new VectorCodec(cqlVectorType, standardCodec),
jsonCodec,
context.getAttribute(OBJECT_MAPPER),
nullStrings);
default:
LOGGER.error("Unsupported custom type {}", customType.getClassName());
}
}
// fall through
@@ -16,21 +16,23 @@
package com.datastax.oss.dsbulk.codecs.text.json;

import com.datastax.oss.driver.api.core.data.CqlVector;
import com.datastax.oss.driver.internal.core.type.codec.CqlVectorCodec;
import com.datastax.oss.driver.internal.core.type.codec.VectorCodec;
import com.datastax.oss.dsbulk.codecs.api.ConvertingCodec;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

public class JsonNodeToVectorCodec<SubtypeT> extends JsonNodeConvertingCodec<CqlVector<SubtypeT>> {
public class JsonNodeToVectorCodec<SubtypeT extends Number>
extends JsonNodeConvertingCodec<CqlVector<SubtypeT>> {

private final ConvertingCodec<JsonNode, SubtypeT> subtypeCodec;
private final ObjectMapper objectMapper;

public JsonNodeToVectorCodec(
CqlVectorCodec<SubtypeT> targetCodec,
VectorCodec<SubtypeT> targetCodec,
ConvertingCodec<JsonNode, SubtypeT> subtypeCodec,
ObjectMapper objectMapper,
List<String> nullStrings) {
@@ -42,17 +44,17 @@ public JsonNodeToVectorCodec(
@Override
public CqlVector<SubtypeT> externalToInternal(JsonNode jsonNode) {
if (jsonNode == null || !jsonNode.isArray()) return null;
CqlVector.Builder<SubtypeT> builder = CqlVector.builder();
List<SubtypeT> result = new ArrayList<>();
for (Iterator<JsonNode> it = jsonNode.elements(); it.hasNext(); )
builder.add(subtypeCodec.externalToInternal(it.next()));
return builder.build();
result.add(subtypeCodec.externalToInternal(it.next()));
return CqlVector.newInstance(result);
}

@Override
public JsonNode internalToExternal(CqlVector<SubtypeT> value) {
if (value == null) return null;
ArrayNode root = objectMapper.createArrayNode();
for (SubtypeT element : value.getValues()) {
for (SubtypeT element : value) {
root.add(subtypeCodec.internalToExternal(element));
}
return root;
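For context, the hunk above replaces the removed CqlVector.Builder/getValues() calls with CqlVector.newInstance(...) and direct iteration. A self-contained sketch of that usage pattern, with illustrative values:

import com.datastax.oss.driver.api.core.data.CqlVector;
import java.util.Arrays;
import java.util.List;

class CqlVectorUsageSketch {
  public static void main(String[] args) {
    // Build a vector from a plain list, as externalToInternal() now does.
    List<Float> values = Arrays.asList(1.1f, 2.2f, 3.3f);
    CqlVector<Float> vector = CqlVector.newInstance(values);

    // Iterate the vector directly, as internalToExternal() now does,
    // instead of calling the removed getValues() accessor.
    for (Float element : vector) {
      System.out.println(element);
    }
  }
}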
@@ -62,15 +62,15 @@

import com.datastax.oss.driver.api.core.data.TupleValue;
import com.datastax.oss.driver.api.core.data.UdtValue;
import com.datastax.oss.driver.api.core.type.CqlVectorType;
import com.datastax.oss.driver.api.core.type.CustomType;
import com.datastax.oss.driver.api.core.type.DataType;
import com.datastax.oss.driver.api.core.type.DataTypes;
import com.datastax.oss.driver.api.core.type.VectorType;
import com.datastax.oss.driver.api.core.type.codec.CodecNotFoundException;
import com.datastax.oss.driver.api.core.type.codec.TypeCodec;
import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
import com.datastax.oss.driver.api.core.type.reflect.GenericType;
import com.datastax.oss.driver.internal.core.type.codec.CqlVectorCodec;
import com.datastax.oss.driver.internal.core.type.codec.VectorCodec;
import com.datastax.oss.driver.internal.core.type.codec.registry.DefaultCodecRegistry;
import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList;
import com.datastax.oss.dsbulk.codecs.api.ConversionContext;
@@ -105,6 +105,7 @@ public class StringConvertingCodecProvider implements ConvertingCodecProvider {
private static final String POLYGON_CLASS_NAME = "org.apache.cassandra.db.marshal.PolygonType";
private static final String DATE_RANGE_CLASS_NAME =
"org.apache.cassandra.db.marshal.DateRangeType";
private static final String VECTOR_CLASS_NAME = "org.apache.cassandra.db.marshal.VectorType";

@NonNull
@Override
@@ -332,13 +333,15 @@ public class StringConvertingCodecProvider implements ConvertingCodecProvider {
return new StringToPolygonCodec(context.getAttribute(GEO_FORMAT), nullStrings);
case DATE_RANGE_CLASS_NAME:
return new StringToDateRangeCodec(nullStrings);
case CqlVectorType.CQLVECTOR_CLASS_NAME:
CqlVectorType cqlVectorType = (CqlVectorType) cqlType;
case VECTOR_CLASS_NAME:
VectorType cqlVectorType = (VectorType) cqlType;
ConvertingCodec<String, ?> subtypeCodec =
codecFactory.createConvertingCodec(
cqlVectorType.getSubtype(), GenericType.STRING, false);
cqlVectorType.getElementType(), GenericType.STRING, false);
return new StringToVectorCodec(
new CqlVectorCodec(cqlVectorType, subtypeCodec), nullStrings);
new VectorCodec(cqlVectorType, subtypeCodec), nullStrings);
default:
LOGGER.error("Unsupported custom type {}", customType.getClassName());
}
}
// fall through
@@ -16,12 +16,13 @@
package com.datastax.oss.dsbulk.codecs.text.string;

import com.datastax.oss.driver.api.core.data.CqlVector;
import com.datastax.oss.driver.internal.core.type.codec.CqlVectorCodec;
import com.datastax.oss.driver.internal.core.type.codec.VectorCodec;
import java.util.List;

public class StringToVectorCodec<SubtypeT> extends StringConvertingCodec<CqlVector<SubtypeT>> {
public class StringToVectorCodec<SubtypeT extends Number>
extends StringConvertingCodec<CqlVector<SubtypeT>> {

public StringToVectorCodec(CqlVectorCodec<SubtypeT> subcodec, List<String> nullStrings) {
public StringToVectorCodec(VectorCodec<SubtypeT> subcodec, List<String> nullStrings) {
super(subcodec, nullStrings);
}

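The constructor change above matches the updated test further down in this PR: the codec now wraps the driver's VectorCodec built from DataTypes.vectorOf(...). A minimal wiring sketch (float vector of dimension 5 and the "NULL" null-string are illustrative):

import com.datastax.oss.driver.api.core.type.DataTypes;
import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
import com.datastax.oss.driver.internal.core.type.codec.VectorCodec;
import com.datastax.oss.dsbulk.codecs.text.string.StringToVectorCodec;
import java.util.Collections;

class StringToVectorCodecWiringSketch {
  static StringToVectorCodec floatVectorStringCodec() {
    // Same shape as StringToVectorCodecTest below: wrap a VectorCodec for a
    // vector of five floats and treat the literal "NULL" as a null value.
    return new StringToVectorCodec(
        new VectorCodec(DataTypes.vectorOf(DataTypes.FLOAT, 5), TypeCodecs.FLOAT),
        Collections.singletonList("NULL"));
  }
}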
@@ -19,11 +19,10 @@
import static com.datastax.oss.dsbulk.tests.assertions.TestAssertions.assertThat;

import com.datastax.oss.driver.api.core.data.CqlVector;
import com.datastax.oss.driver.api.core.type.CqlVectorType;
import com.datastax.oss.driver.api.core.type.DataTypes;
import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
import com.datastax.oss.driver.api.core.type.reflect.GenericType;
import com.datastax.oss.driver.internal.core.type.codec.CqlVectorCodec;
import com.datastax.oss.driver.internal.core.type.codec.VectorCodec;
import com.datastax.oss.driver.shaded.guava.common.collect.Lists;
import com.datastax.oss.dsbulk.codecs.api.ConvertingCodecFactory;
import com.fasterxml.jackson.databind.JsonNode;
@@ -34,9 +33,9 @@

public class JsonNodeToVectorCodecTest {
private final ArrayList<Float> values = Lists.newArrayList(1.1f, 2.2f, 3.3f, 4.4f, 5.5f);
private final CqlVector vector = CqlVector.builder().addAll(values).build();
private final CqlVectorCodec vectorCodec =
new CqlVectorCodec(new CqlVectorType(DataTypes.FLOAT, 5), TypeCodecs.FLOAT);
private final CqlVector vector = CqlVector.newInstance(values);
private final VectorCodec vectorCodec =
new VectorCodec(DataTypes.vectorOf(DataTypes.FLOAT, 5), TypeCodecs.FLOAT);
private final ArrayNode vectorDoc;

private final ConvertingCodecFactory factory = new ConvertingCodecFactory();
@@ -87,8 +86,8 @@ void should_not_convert_from_invalid_internal() {
tooFew.remove(0);

assertThat(dsbulkCodec)
.cannotConvertFromInternal(CqlVector.builder().addAll(tooMany).build())
.cannotConvertFromInternal(CqlVector.builder().addAll(tooFew).build())
.cannotConvertFromInternal(CqlVector.newInstance(tooMany))
.cannotConvertFromInternal(CqlVector.newInstance(tooFew))
.cannotConvertFromInternal("not a valid vector");
}
}
@@ -18,10 +18,9 @@
import static com.datastax.oss.dsbulk.tests.assertions.TestAssertions.assertThat;

import com.datastax.oss.driver.api.core.data.CqlVector;
import com.datastax.oss.driver.api.core.type.CqlVectorType;
import com.datastax.oss.driver.api.core.type.DataTypes;
import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
import com.datastax.oss.driver.internal.core.type.codec.CqlVectorCodec;
import com.datastax.oss.driver.internal.core.type.codec.VectorCodec;
import com.datastax.oss.driver.shaded.guava.common.collect.Lists;
import java.util.ArrayList;
import org.junit.jupiter.api.Disabled;
@@ -30,9 +29,9 @@
public class StringToVectorCodecTest {

private final ArrayList<Float> values = Lists.newArrayList(1.1f, 2.2f, 3.3f, 4.4f, 5.5f);
private final CqlVector vector = CqlVector.builder().addAll(values).build();
private final CqlVectorCodec vectorCodec =
new CqlVectorCodec(new CqlVectorType(DataTypes.FLOAT, 5), TypeCodecs.FLOAT);
private final CqlVector vector = CqlVector.newInstance(values);
private final VectorCodec vectorCodec =
new VectorCodec(DataTypes.vectorOf(DataTypes.FLOAT, 5), TypeCodecs.FLOAT);

private final StringToVectorCodec dsbulkCodec =
new StringToVectorCodec(vectorCodec, Lists.newArrayList("NULL"));
@@ -69,8 +68,8 @@ void should_not_convert_from_invalid_internal() {
tooFew.remove(0);

assertThat(dsbulkCodec)
.cannotConvertFromInternal(CqlVector.builder().addAll(tooMany).build())
.cannotConvertFromInternal(CqlVector.builder().addAll(tooFew).build())
.cannotConvertFromInternal(CqlVector.newInstance(tooMany))
.cannotConvertFromInternal(CqlVector.newInstance(tooFew))
.cannotConvertFromInternal("not a valid vector");
}
}