diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..c0fb2d2 --- /dev/null +++ b/.gitignore @@ -0,0 +1,27 @@ +# Compiled class file +*.class + +# Log file +*.log + +# BlueJ files +*.ctxt + +# Mobile Tools for Java (J2ME) +.mtj.tmp/ + +# Package Files # +*.jar +*.war +*.nar +*.ear +*.zip +*.tar.gz +*.rar + +*.idea +*.iml +target +# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml +hs_err_pid* +replay_pid* diff --git a/hsweb-incubator-easyorm-clickhouse.iml b/hsweb-incubator-easyorm-clickhouse.iml new file mode 100644 index 0000000..4cb7b15 --- /dev/null +++ b/hsweb-incubator-easyorm-clickhouse.iml @@ -0,0 +1,86 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..0ab5a5f --- /dev/null +++ b/pom.xml @@ -0,0 +1,90 @@ + + + 4.0.0 + + org.hswebframework + hsweb-incubator-easyorm-clickhouse + 1.0-SNAPSHOT + + + 8 + 8 + 4.0.17-SNAPSHOT + + + + + org.hswebframework.web + hsweb-commons-crud + ${hsweb.framework.version} + + + org.springframework + spring-webflux + 5.3.25 + + + + + aliyun-nexus + aliyun + https://maven.aliyun.com/nexus/content/groups/public/ + + false + + + + + hsweb-nexus + Nexus Release Repository + https://nexus.jetlinks.cn/content/groups/public/ + + false + + + true + always + + + + + + + sct-releases + Nexus Release Repository + http://192.168.9.91:81/repository/maven-releases/ + + + sct-snapshots + Nexus Snapshot Repository + http://192.168.9.91:81/repository/maven-snapshots/ + + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + 2.8.2 + + + + org.apache.maven.plugins + maven-source-plugin + 2.1.2 + + + package + + jar + + + + + + + \ No newline at end of file diff --git a/src/main/java/org/hswebframework/ezorm/rdb/supports/clickhouse/ClickhouseDataType.java 
b/src/main/java/org/hswebframework/ezorm/rdb/supports/clickhouse/ClickhouseDataType.java new file mode 100644 index 0000000..ad3c289 --- /dev/null +++ b/src/main/java/org/hswebframework/ezorm/rdb/supports/clickhouse/ClickhouseDataType.java @@ -0,0 +1,54 @@ +package org.hswebframework.ezorm.rdb.supports.clickhouse; + +import lombok.AllArgsConstructor; +import lombok.Getter; +import org.hswebframework.web.dict.EnumDict; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.sql.Array; + +/** + * @author dengpengyu + * @date 2023/10/12 15:16 + */ +@AllArgsConstructor +@Getter +public enum ClickhouseDataType implements EnumDict> { + + INT8(Byte.class), + INT16(Short.class), + INT32(Integer.class), + INT64(Long.class), + INT128(BigInteger.class), + INT256(BigInteger.class), + UINT8(Short.class), + UINT16(Integer.class), + UINT32(Long.class), + UINT64(BigInteger.class), + NULLABLE_UINT64(BigInteger.class), + FLOAT32(Float.class), + FLOAT64(Double.class), + STRING(String.class), + UUID(String.class), + BOOLEAN(Boolean.class), + DATE(java.sql.Date.class), + DATETIME(java.sql.Timestamp.class), + DATETIME64(java.time.LocalDateTime.class), + ARRAY(Array.class), // This would be a placeholder, actual implementation depends on the array type + ENUM(String.class), // Enums in Clickhouse are typically represented as strings in Java + DECIMAL(BigDecimal.class), + IP(String.class); // IP address types can be represented as strings + + private final Class javaType; + + @Override + public Class getValue() { + return javaType; + } + + @Override + public String getText() { + return name(); + } +} diff --git a/src/main/java/org/hswebframework/ezorm/rdb/supports/clickhouse/ClickhouseDefaultRepository.java b/src/main/java/org/hswebframework/ezorm/rdb/supports/clickhouse/ClickhouseDefaultRepository.java new file mode 100644 index 0000000..93e75a3 --- /dev/null +++ b/src/main/java/org/hswebframework/ezorm/rdb/supports/clickhouse/ClickhouseDefaultRepository.java @@ -0,0 
+1,262 @@ +package org.hswebframework.ezorm.rdb.supports.clickhouse; + +import lombok.Getter; +import lombok.Setter; +import org.hswebframework.ezorm.core.GlobalConfig; +import org.hswebframework.ezorm.core.ObjectPropertyOperator; +import org.hswebframework.ezorm.rdb.events.ContextKeyValue; +import org.hswebframework.ezorm.rdb.events.ContextKeys; +import org.hswebframework.ezorm.rdb.executor.wrapper.ResultWrapper; +import org.hswebframework.ezorm.rdb.mapping.EntityColumnMapping; +import org.hswebframework.ezorm.rdb.mapping.LazyEntityColumnMapping; +import org.hswebframework.ezorm.rdb.mapping.MappingFeatureType; +import org.hswebframework.ezorm.rdb.mapping.events.EventResultOperator; +import org.hswebframework.ezorm.rdb.mapping.events.MappingContextKeys; +import org.hswebframework.ezorm.rdb.mapping.events.MappingEventTypes; +import org.hswebframework.ezorm.rdb.metadata.RDBColumnMetadata; +import org.hswebframework.ezorm.rdb.metadata.RDBTableMetadata; +import org.hswebframework.ezorm.rdb.operator.DatabaseOperator; +import org.hswebframework.ezorm.rdb.operator.builder.fragments.NativeSql; +import org.hswebframework.ezorm.rdb.operator.dml.insert.InsertOperator; +import org.hswebframework.ezorm.rdb.operator.dml.insert.InsertResultOperator; +import org.hswebframework.ezorm.rdb.operator.dml.upsert.SaveResultOperator; +import org.hswebframework.ezorm.rdb.operator.dml.upsert.UpsertOperator; + +import java.util.*; +import java.util.function.BiConsumer; +import java.util.function.Supplier; +import java.util.stream.Stream; + +import static org.hswebframework.ezorm.rdb.events.ContextKeys.tableMetadata; +import static org.hswebframework.ezorm.rdb.mapping.events.MappingContextKeys.*; +import static org.hswebframework.ezorm.rdb.mapping.events.MappingContextKeys.insert; + +/** + * @className ClickhouseDefaultRepository + * @Description TODO + * @Author zhong + * @Date 2024/1/19 9:32 + * @Vesion 1.0 + */ +public abstract class ClickhouseDefaultRepository { + protected 
DatabaseOperator operator; + + protected ResultWrapper wrapper; + + private volatile String idColumn; + + @Getter + protected EntityColumnMapping mapping; + + @Setter + protected volatile String[] properties; + + protected Supplier tableSupplier; + + protected final List> defaultContextKeyValue = new ArrayList<>(); + + public ClickhouseDefaultRepository(DatabaseOperator operator, Supplier supplier, ResultWrapper wrapper) { + this.operator = operator; + this.wrapper = wrapper; + this.tableSupplier = supplier; + defaultContextKeyValue.add(repository.value(this)); + defaultContextKeyValue.add(ContextKeys.database.value(operator)); + + } + + protected RDBTableMetadata getTable() { + return tableSupplier.get(); + } + + protected ContextKeyValue[] getDefaultContextKeyValue(ContextKeyValue... kv) { + if (kv.length == 0) { + return defaultContextKeyValue.toArray(new ContextKeyValue[0]); + } + List> keyValues = new ArrayList<>(defaultContextKeyValue); + keyValues.addAll(Arrays.asList(kv)); + return keyValues.toArray(new ContextKeyValue[0]); + } + + public String[] getProperties() { + if (properties == null) { + properties = mapping + .getColumnPropertyMapping() + .entrySet() + .stream() + .filter(kv -> getTable().getColumn(kv.getKey()).isPresent()) + .map(Map.Entry::getValue) + .toArray(String[]::new); + } + return properties; + } + + protected String getIdColumn() { + if (idColumn == null) { + this.idColumn = getTable() + .getColumns() + .stream() + .filter(RDBColumnMetadata::isPrimaryKey) + .findFirst() + .map(RDBColumnMetadata::getName) + .orElseThrow(() -> new UnsupportedOperationException("id column not exists")); + } + return idColumn; + } + + protected void initMapping(Class entityType) { + + this.mapping = LazyEntityColumnMapping.of(() -> getTable() + .findFeature(MappingFeatureType.columnPropertyMapping.createFeatureId(entityType)) + .orElseThrow(() -> new UnsupportedOperationException("unsupported columnPropertyMapping feature"))); + 
defaultContextKeyValue.add(MappingContextKeys.columnMapping(mapping)); + } + + protected Collection tryMergeDuplicate(Collection data) { + if (data.isEmpty()) { + return data; + } + Map merging = new HashMap<>(data.size()); + List merged = new ArrayList<>(data.size()); + for (E datum : data) { + Object id = getProperty(datum, getIdColumn()); + if (id == null) { + merged.add(datum); + } else { + merging.compute(id, (_id, old) -> { + if (old != null) { + return merge(old, datum); + } + return datum; + }); + } + } + merged.addAll(merging.values()); + return merged; + } + + protected E merge(E older, E newer) { + ObjectPropertyOperator opt = GlobalConfig.getPropertyOperator(); + for (String property : getProperties()) { + Object newerVal = opt.getProperty(newer, property).orElse(null); + if (newerVal != null) { + continue; + } + opt.getProperty(older, property) + .ifPresent(olderValue -> opt.setProperty(newer, property, olderValue)); + + } + return newer; + } + + private Object getProperty(E data, String property) { + return GlobalConfig + .getPropertyOperator() + .getProperty(data, property) + .orElse(null); + } + + protected SaveResultOperator doSave(Collection data) { + Collection _data = tryMergeDuplicate(data); + RDBTableMetadata table = getTable(); + UpsertOperator upsert = operator.dml().upsert(table); + + return EventResultOperator.create( + () -> { + upsert.columns(getProperties()); + List ignore = new ArrayList<>(); + for (E e : _data) { + upsert.values(Stream.of(getProperties()) + .map(property -> getInsertColumnValue(e, property, (prop, val) -> ignore.add(prop))) + .toArray()); + } + upsert.ignoreUpdate(ignore.toArray(new String[0])); + return upsert.execute(); + }, + SaveResultOperator.class, + table, + MappingEventTypes.save_before, + MappingEventTypes.save_after, + getDefaultContextKeyValue(instance(_data), + type("batch"), + tableMetadata(table), + upsert(upsert)) + ); + } + + protected InsertResultOperator doInsert(E data) { + RDBTableMetadata table = 
getTable(); + InsertOperator insert = operator.dml().insert(table); + + return EventResultOperator.create( + () -> { + for (Map.Entry entry : mapping.getColumnPropertyMapping().entrySet()) { + String column = entry.getKey(); + String property = entry.getValue(); + insert.value(column, getInsertColumnValue(data, property)); + } + return insert.execute(); + }, + InsertResultOperator.class, + table, + MappingEventTypes.insert_before, + MappingEventTypes.insert_after, + getDefaultContextKeyValue( + instance(data), + type("single"), + tableMetadata(table), + insert(insert)) + ); + + } + + private Object getInsertColumnValue(E data, String property, BiConsumer whenDefaultValue) { + Object value = GlobalConfig.getPropertyOperator().getProperty(data, property).orElse(null); + if (value == null) { + value = mapping.getColumnByProperty(property) + .flatMap(RDBColumnMetadata::generateDefaultValue) + .orElse(null); + if (value != null) { + whenDefaultValue.accept(property, value); + //回填 + if (!(value instanceof NativeSql)) { + GlobalConfig.getPropertyOperator().setProperty(data, property, value); + } + } + } + return value; + } + + private Object getInsertColumnValue(E data, String property) { + + return getInsertColumnValue(data, property, (prop, val) -> { + }); + } + + protected InsertResultOperator doInsert(Collection batch) { + Collection _data = tryMergeDuplicate(batch); + RDBTableMetadata table = getTable(); + InsertOperator insert = operator.dml().insert(table); + + return EventResultOperator.create( + () -> { + insert.columns(getProperties()); + + for (E e : _data) { + insert.values(Stream.of(getProperties()) + .map(property -> getInsertColumnValue(e, property)) + .toArray()); + } + return insert.execute(); + }, + InsertResultOperator.class, + table, + MappingEventTypes.insert_before, + MappingEventTypes.insert_after, + getDefaultContextKeyValue( + instance(_data), + type("batch"), + tableMetadata(table), + insert(insert)) + ); + } +} diff --git 
a/src/main/java/org/hswebframework/ezorm/rdb/supports/clickhouse/ClickhouseDialect.java b/src/main/java/org/hswebframework/ezorm/rdb/supports/clickhouse/ClickhouseDialect.java new file mode 100644 index 0000000..ccb020f --- /dev/null +++ b/src/main/java/org/hswebframework/ezorm/rdb/supports/clickhouse/ClickhouseDialect.java @@ -0,0 +1,53 @@ +package org.hswebframework.ezorm.rdb.supports.clickhouse; + +import org.hswebframework.ezorm.rdb.metadata.JdbcDataType; +import org.hswebframework.ezorm.rdb.metadata.dialect.DefaultDialect; + +import java.sql.JDBCType; + +/** + * @className ClickhouseDire + * @Description TODO + * @Author dengpengyu + * @Date 2023/9/4 14:53 + * @Vesion 1.0 + */ +public class ClickhouseDialect extends DefaultDialect { + + public ClickhouseDialect() { + addDataTypeBuilder(JDBCType.TINYINT, (meta) -> ClickhouseDataType.INT8.getText()); + addDataTypeBuilder(JDBCType.BOOLEAN, (meta) -> ClickhouseDataType.INT8.getText()); + addDataTypeBuilder(JDBCType.SMALLINT, (meta) -> ClickhouseDataType.INT16.getText()); + addDataTypeBuilder(JDBCType.INTEGER, (meta) -> ClickhouseDataType.INT32.getText()); + addDataTypeBuilder(JDBCType.BIGINT, (meta) -> ClickhouseDataType.INT64.getText()); + addDataTypeBuilder(JDBCType.VARCHAR, (meta) -> ClickhouseDataType.STRING.getText()); + registerDataType("uuid", JdbcDataType.of(JDBCType.VARCHAR, String.class)); + registerDataType("timestamp", JdbcDataType.of(JDBCType.BIGINT, Long.class)); + registerDataType("date", JdbcDataType.of(JDBCType.VARCHAR, String.class)); + } + + @Override + public String getQuoteStart() { + return "`"; + } + + @Override + public String getQuoteEnd() { + return "`"; + } + + @Override + public boolean isColumnToUpperCase() { + return false; + } + + @Override + public String getId() { + return "clickhouse"; + } + + @Override + public String getName() { + return "Clickhouse"; + } +} diff --git a/src/main/java/org/hswebframework/ezorm/rdb/supports/clickhouse/ClickhouseHelper.java 
b/src/main/java/org/hswebframework/ezorm/rdb/supports/clickhouse/ClickhouseHelper.java new file mode 100644 index 0000000..2ad3df2 --- /dev/null +++ b/src/main/java/org/hswebframework/ezorm/rdb/supports/clickhouse/ClickhouseHelper.java @@ -0,0 +1,52 @@ +package org.hswebframework.ezorm.rdb.supports.clickhouse; + + +import org.hswebframework.ezorm.rdb.executor.reactive.ReactiveSqlExecutor; + +import org.hswebframework.ezorm.rdb.metadata.RDBSchemaMetadata; +import org.hswebframework.ezorm.rdb.metadata.dialect.Dialect; +import org.hswebframework.web.crud.configuration.ClickhouseProperties; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.context.annotation.Configuration; +import org.springframework.web.reactive.function.client.WebClient; + + +/** + * @author dengpengyu + * @date 2023/9/21 14:28 + */ +@Configuration +@EnableConfigurationProperties(ClickhouseProperties.class) +public class ClickhouseHelper implements Helper { + + @Autowired + ClickhouseProperties properties; + + @Override + public RDBSchemaMetadata getRDBSchemaMetadata() { + return new ClickhouseSchemaMetadata(properties.getDatabase()); + } + + @Override + public Dialect getDialect() { + return new ClickhouseDialect(); + } + + @Override + public ReactiveSqlExecutor getReactiveSqlExecutor() { + WebClient clickhouseWebClient = WebClient + .builder() + .baseUrl(properties.getUrl()) + .defaultHeader("X-ClickHouse-User", properties.getUsername()) + .defaultHeader("X-ClickHouse-Key", properties.getPassword()) + .build(); + + return new ClickhouseReactiveSqlExecutor(clickhouseWebClient); + } + + @Override + public ClickhouseProperties getClickhouseProperties() { + return properties; + } +} diff --git a/src/main/java/org/hswebframework/ezorm/rdb/supports/clickhouse/ClickhouseReactiveDefaultRepository.java 
b/src/main/java/org/hswebframework/ezorm/rdb/supports/clickhouse/ClickhouseReactiveDefaultRepository.java new file mode 100644 index 0000000..cbcbc71 --- /dev/null +++ b/src/main/java/org/hswebframework/ezorm/rdb/supports/clickhouse/ClickhouseReactiveDefaultRepository.java @@ -0,0 +1,163 @@ +package org.hswebframework.ezorm.rdb.supports.clickhouse; + +import org.apache.commons.collections4.CollectionUtils; +import org.hswebframework.ezorm.rdb.executor.wrapper.ResultWrapper; +import org.hswebframework.ezorm.rdb.mapping.ReactiveDelete; +import org.hswebframework.ezorm.rdb.mapping.ReactiveQuery; +import org.hswebframework.ezorm.rdb.mapping.ReactiveRepository; +import org.hswebframework.ezorm.rdb.mapping.ReactiveUpdate; +import org.hswebframework.ezorm.rdb.mapping.defaults.*; +import org.hswebframework.ezorm.rdb.metadata.RDBTableMetadata; +import org.hswebframework.ezorm.rdb.operator.DatabaseOperator; +import org.hswebframework.ezorm.rdb.operator.dml.QueryOperator; +import org.reactivestreams.Publisher; +import org.slf4j.Logger; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.util.Collection; +import java.util.function.Supplier; + +/** + * @className ClickhouseReactiveDefaultRepository + * @Description TODO + * @Author zhong + * @Date 2024/1/19 9:39 + * @Vesion 1.0 + */ +public class ClickhouseReactiveDefaultRepository extends ClickhouseDefaultRepository implements ReactiveRepository { + private final Logger logger; + + public ClickhouseReactiveDefaultRepository(DatabaseOperator operator, String table, Class type, ResultWrapper wrapper) { + this(operator, + () -> operator + .getMetadata() + .getTable(table) + .orElseThrow(() -> new UnsupportedOperationException("table [" + table + "] doesn't exist")), type, wrapper); + } + + public ClickhouseReactiveDefaultRepository(DatabaseOperator operator, RDBTableMetadata table, Class type, ResultWrapper wrapper) { + this(operator, () -> table, type, wrapper); + } + + public 
ClickhouseReactiveDefaultRepository(DatabaseOperator operator, Supplier table, Class type, ResultWrapper wrapper) { + super(operator, table, wrapper); + initMapping(type); + this.logger = getLogger(type); + } + + private static Logger getLogger(Class type) { + return org.slf4j.LoggerFactory.getLogger(type); + } + + @Override + public Mono newInstance() { + return Mono.fromSupplier(wrapper::newRowInstance); + } + + @Override + public Mono findById(Mono primaryKey) { + return primaryKey + .flatMap(k -> createQuery().where(getIdColumn(), k).fetchOne()); + } + + @Override + public Flux findById(Flux key) { + return key.collectList() + .filter(CollectionUtils::isNotEmpty) + .flatMapMany(idList -> createQuery().where().in(getIdColumn(), idList).fetch()); + } + + @Override + public Mono deleteById(Publisher key) { + return Flux.from(key) + .collectList() + .filter(CollectionUtils::isNotEmpty) + .flatMap(list -> createDelete().where().in(getIdColumn(), list).execute()) + .defaultIfEmpty(0); + } + + @Override + public Mono updateById(K id, Mono data) { + return data + .flatMap(_data -> createUpdate() + .where(getIdColumn(), id) + .set(_data) + .execute()); + } + + @Override + public Mono save(Publisher data) { + return Flux + .from(data) + .collectList() + .filter(CollectionUtils::isNotEmpty) + .flatMap(list -> doSave(list).reactive().as(this::setupLogger)) + .defaultIfEmpty(SaveResult.of(0, 0)); + } + + @Override + public Mono insert(Publisher data) { + return Flux + .from(data) + .buffer(100) + .as(this::insertBatch); + } + + @Override + public Mono insertBatch(Publisher> data) { + return Flux + .from(data) + .filter(CollectionUtils::isNotEmpty) + .flatMap(e -> doInsert(e).reactive()) + .reduce(Math::addExact) + .defaultIfEmpty(0) + .as(this::setupLogger); + } + + @Override + public ReactiveQuery createQuery() { + return new DefaultReactiveQuery<>(getTable() + , mapping + , operator.dml() + , wrapper + , logger + , getDefaultContextKeyValue()); + } + + @Override + public 
ReactiveUpdate createUpdate() { + return new DefaultReactiveUpdate<>( + getTable() + , operator.dml().update(getTable().getFullName()) + , mapping + , logger + , getDefaultContextKeyValue()); + } + + @Override + public ReactiveDelete createDelete() { + return new DefaultReactiveDelete(getTable() + , operator.dml().delete(getTable().getFullName()) + , logger + , getDefaultContextKeyValue() + ); + } + + + private Mono setupLogger(Mono async) { + return async.contextWrite(ctx -> ctx.put(Logger.class, logger)); + } + + private Flux setupLogger(Flux async) { + return async.contextWrite(ctx -> ctx.put(Logger.class, logger)); + } + + + @Override + public QueryOperator nativeQuery() { + return operator + .dml() + .query(getTable()); + } +} diff --git a/src/main/java/org/hswebframework/ezorm/rdb/supports/clickhouse/ClickhouseReactiveSqlExecutor.java b/src/main/java/org/hswebframework/ezorm/rdb/supports/clickhouse/ClickhouseReactiveSqlExecutor.java new file mode 100644 index 0000000..f572cde --- /dev/null +++ b/src/main/java/org/hswebframework/ezorm/rdb/supports/clickhouse/ClickhouseReactiveSqlExecutor.java @@ -0,0 +1,151 @@ +package org.hswebframework.ezorm.rdb.supports.clickhouse; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import org.hswebframework.ezorm.rdb.executor.BatchSqlRequest; +import org.hswebframework.ezorm.rdb.executor.DefaultColumnWrapperContext; +import org.hswebframework.ezorm.rdb.executor.SqlRequest; +import org.hswebframework.ezorm.rdb.executor.reactive.ReactiveSqlExecutor; +import org.hswebframework.ezorm.rdb.executor.wrapper.ResultWrapper; +import org.reactivestreams.Publisher; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.util.CollectionUtils; +import org.springframework.web.reactive.function.client.WebClient; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.lang.reflect.Constructor; +import 
java.util.*; + +/** + * @className ClickhouseRestfulSqlExecutor + * @Description TODO + * @Author dengpengyu + * @Date 2023/9/4 14:40 + * @Vesion 1.0 + */ +public class ClickhouseReactiveSqlExecutor implements ReactiveSqlExecutor { + private Logger log = LoggerFactory.getLogger(ClickhouseReactiveSqlExecutor.class); + private WebClient client; + + public ClickhouseReactiveSqlExecutor(WebClient client) { + this.client = client; + } + + @Override + public Mono update(Publisher request) { + return this + .doExecute(request) + .then(Mono.just(1)); + } + + @Override + public Mono execute(Publisher request) { + return this + .doExecute(request) + .then(); + } + + @Override + public Flux select(Publisher request, ResultWrapper wrapper) { + + return this + .doExecute(request) + .flatMap(response -> convertQueryResult(response, wrapper)); + } + + private Flux doExecute(Publisher requests) { + return Flux + .from(requests) + .expand(request -> { + if (request instanceof BatchSqlRequest) { + return Flux.fromIterable(((BatchSqlRequest) request).getBatch()); + } + return Flux.empty(); + }) + + .filter(SqlRequest::isNotEmpty) + .concatMap(request -> { + String sql; + if (request.toNativeSql().toUpperCase().startsWith("INSERT") + || request.toNativeSql().toUpperCase().startsWith("ALTER")) { + sql = request.toNativeSql(); + } else { + sql = request.toNativeSql() + " FORMAT JSON"; + } + log.trace("Execute ==> {}", sql); + return client + .post() + .bodyValue(sql) + .exchangeToMono(response -> response + .bodyToMono(String.class) + .map(json -> { + JSONObject result = JSON.parseObject(json); + checkExecuteResult(sql, json); + + return result; + })); + }); + } + + private void checkExecuteResult(String sql, String code) { + if (code.startsWith("Code")) { + throw new RuntimeException(code); + } + } + + protected Flux convertQueryResult(JSONObject result, ResultWrapper wrapper) { + + Map columnMeta = new HashMap(); + + JSONArray resultMeta = result.getJSONArray("meta"); + JSONArray 
resultEntityList = result.getJSONArray("data"); + + if (CollectionUtils.isEmpty(resultMeta) || CollectionUtils.isEmpty(resultEntityList)) { + return Flux.empty(); + } + //把当前列和列类型一一对应 + for (Object o : resultMeta) { + JSONObject e = (JSONObject) o; + columnMeta.put(e.getString("name"), ClickhouseDataType.valueOf(e.getString("type").toUpperCase().replace("NULLABLE(", "").replace(")", ""))); + } + //所有的列名 + ArrayList columns = new ArrayList<>(columnMeta.keySet()); + + return Flux.create(sink -> { + wrapper.beforeWrap(() -> columns); + + for (Object oneObjectEntity : resultEntityList) { + E rowInstance = wrapper.newRowInstance(); + JSONObject oneJsonObjectEntity = (JSONObject) oneObjectEntity; + for (String columnName : columns) { + Object value = oneJsonObjectEntity.get(columnName); + ClickhouseDataType dataType = columnMeta.get(columnName); + try { + Class typeClass = dataType.getJavaType(); + Constructor constructor = typeClass.getConstructor(String.class); + + if (Objects.nonNull(value)) { + value = constructor.newInstance(value.toString()); + } + } catch (Exception e) { + e.printStackTrace(); + } + DefaultColumnWrapperContext context = new DefaultColumnWrapperContext<>(columns.indexOf(columnName), columnName, value, rowInstance); + wrapper.wrapColumn(context); + rowInstance = context.getRowInstance(); + } + if (!wrapper.completedWrapRow(rowInstance)) { + break; + } + if (rowInstance != null) { + sink.next(rowInstance); + } + } + wrapper.completedWrap(); + sink.complete(); + }); + } +} diff --git a/src/main/java/org/hswebframework/ezorm/rdb/supports/clickhouse/ClickhouseSchemaMetadata.java b/src/main/java/org/hswebframework/ezorm/rdb/supports/clickhouse/ClickhouseSchemaMetadata.java new file mode 100644 index 0000000..6572a41 --- /dev/null +++ b/src/main/java/org/hswebframework/ezorm/rdb/supports/clickhouse/ClickhouseSchemaMetadata.java @@ -0,0 +1,53 @@ +package org.hswebframework.ezorm.rdb.supports.clickhouse; + +import 
org.hswebframework.ezorm.rdb.codec.EnumValueCodec; +import org.hswebframework.ezorm.rdb.metadata.RDBSchemaMetadata; +import org.hswebframework.ezorm.rdb.metadata.RDBTableMetadata; +import org.hswebframework.ezorm.rdb.operator.CompositeExceptionTranslation; + +import org.hswebframework.ezorm.rdb.supports.clickhouse.sqlBuilder.ClickhouseDeleteSqlBuilder; +import org.hswebframework.ezorm.rdb.supports.clickhouse.sqlBuilder.ClickhouseUpdateSqlBuilder; +import org.hswebframework.ezorm.rdb.supports.mysql.*; +import org.hswebframework.ezorm.rdb.utils.FeatureUtils; + +/** + * @className ClickhouseSchemaMetadata + * @Description TODO + * @Author dengpengyu + * @Date 2023/9/5 9:36 + * @Vesion 1.0 + */ +public class ClickhouseSchemaMetadata extends RDBSchemaMetadata { + public ClickhouseSchemaMetadata(String name) { + super(name); + addFeature(new MysqlPaginator()); + // TODO 后续增加建表 + //读取表元数据 + addFeature(new ClickhouseTableMetadataParser(this)); + addFeature(new ClickhouseDialect()); + addFeature(new CompositeExceptionTranslation() + .add(FeatureUtils.r2dbcIsAlive(), () -> MysqlR2DBCExceptionTranslation.of(this)) + .add(MysqlJDBCExceptionTranslation.of(this)) + ); + } + + @Override + public RDBTableMetadata newTable(String name) { + RDBTableMetadata metadata = super.newTable(name); + metadata.addFeature(ClickhouseUpdateSqlBuilder.of(metadata)); + metadata.addFeature(ClickhouseDeleteSqlBuilder.of(metadata)); + metadata.setOnColumnAdded(column -> { + if (column.getValueCodec() instanceof EnumValueCodec && ((EnumValueCodec) column.getValueCodec()).isToMask()) { + column.addFeature(MysqlEnumInFragmentBuilder.in); + column.addFeature(MysqlEnumInFragmentBuilder.notIn); + } + }); + return metadata; + } + + @Override + public void addTable(RDBTableMetadata metadata) { + metadata.addFeature(new MysqlBatchUpsertOperator(metadata)); + super.addTable(metadata); + } +} diff --git a/src/main/java/org/hswebframework/ezorm/rdb/supports/clickhouse/ClickhouseSyncSqlExecutor.java 
b/src/main/java/org/hswebframework/ezorm/rdb/supports/clickhouse/ClickhouseSyncSqlExecutor.java new file mode 100644 index 0000000..880e64e --- /dev/null +++ b/src/main/java/org/hswebframework/ezorm/rdb/supports/clickhouse/ClickhouseSyncSqlExecutor.java @@ -0,0 +1,177 @@ +package org.hswebframework.ezorm.rdb.supports.clickhouse; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import lombok.AllArgsConstructor; +import lombok.SneakyThrows; +import org.hswebframework.ezorm.core.meta.Feature; +import org.hswebframework.ezorm.rdb.executor.BatchSqlRequest; +import org.hswebframework.ezorm.rdb.executor.DefaultColumnWrapperContext; +import org.hswebframework.ezorm.rdb.executor.SqlRequest; +import org.hswebframework.ezorm.rdb.executor.SyncSqlExecutor; +import org.hswebframework.ezorm.rdb.executor.wrapper.ResultWrapper; +import org.hswebframework.web.crud.configuration.ClickhouseProperties; +import org.reactivestreams.Publisher; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.http.HttpEntity; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpMethod; +import org.springframework.http.ResponseEntity; +import org.springframework.util.CollectionUtils; +import org.springframework.web.client.RestTemplate; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.lang.reflect.Constructor; +import java.util.*; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +/** + * @className ClickhouseReactiveSqlExecutor + * @Description TODO + * @Author zhong + * @Date 2024/1/16 14:55 + * @Vesion 1.0 + */ +@AllArgsConstructor(staticName = "of") +public class ClickhouseSyncSqlExecutor implements SyncSqlExecutor { + private Logger log = LoggerFactory.getLogger(ClickhouseSyncSqlExecutor.class); + + private RestTemplate restTemplate; + + private String url; + + private HttpHeaders headers; + + public 
ClickhouseSyncSqlExecutor(ClickhouseProperties clickhouseProperties) { + restTemplate =new RestTemplate(); + this.url=clickhouseProperties.getUrl(); + headers = new HttpHeaders(); + headers.add("X-ClickHouse-User", clickhouseProperties.getUsername()); + headers.add("X-ClickHouse-Key", clickhouseProperties.getPassword()); + } + + public static Feature of(ClickhouseProperties clickhouseProperties) { + return new ClickhouseSyncSqlExecutor(clickhouseProperties); + } + + + + @Override + @SneakyThrows + public int update(SqlRequest request) { + return this + .doExecute(Mono.just(request)) + .then(Mono.just(1)).toFuture().get(30, TimeUnit.SECONDS); + + } + + @Override + @SneakyThrows + public void execute(SqlRequest request) { + + this + .doExecute(Mono.just(request)) + .then() + .toFuture().get(30,TimeUnit.SECONDS); + + } + + @Override + @SneakyThrows + public R select(SqlRequest request, ResultWrapper wrapper) { + this + .doExecute(Mono.just(request)) + .flatMap(response -> convertQueryResult(response, wrapper)) + .collectList().toFuture().get(30,TimeUnit.SECONDS); + + return wrapper.getResult(); + + } + + + private Flux doExecute(Publisher requests) { + return Flux + .from(requests) + .expand(request -> { + if (request instanceof BatchSqlRequest) { + return Flux.fromIterable(((BatchSqlRequest) request).getBatch()); + } + return Flux.empty(); + }) + + .filter(SqlRequest::isNotEmpty) + .concatMap(request -> { + String sql; + if (request.toNativeSql().toUpperCase().startsWith("INSERT") + || request.toNativeSql().toUpperCase().startsWith("ALTER")) { + sql = request.toNativeSql(); + } else { + sql = request.toNativeSql() + " FORMAT JSON"; + } + log.info("Execute ==> {}", sql); + HttpEntity requestEntity = new HttpEntity<>(sql, headers); + ResponseEntity responseEntity = restTemplate.exchange(url, HttpMethod.POST, requestEntity, String.class); + JSONObject result = JSON.parseObject(responseEntity.getBody()); + + return Mono.just(result); + }) + ; + } + + protected Flux 
convertQueryResult(JSONObject result, ResultWrapper wrapper) { + + Map columnMeta = new HashMap(); + + JSONArray resultMeta = result.getJSONArray("meta"); + JSONArray resultEntityList = result.getJSONArray("data"); + + if (CollectionUtils.isEmpty(resultMeta) || CollectionUtils.isEmpty(resultEntityList)) { + return Flux.empty(); + } + //把当前列和列类型一一对应 + for (Object o : resultMeta) { + JSONObject e = (JSONObject) o; + columnMeta.put(e.getString("name"), ClickhouseDataType.valueOf(e.getString("type").toUpperCase().replace("NULLABLE(", "").replace(")", ""))); + } + //所有的列名 + ArrayList columns = new ArrayList<>(columnMeta.keySet()); + + return Flux.create(sink -> { + wrapper.beforeWrap(() -> columns); + + for (Object oneObjectEntity : resultEntityList) { + E rowInstance = wrapper.newRowInstance(); + JSONObject oneJsonObjectEntity = (JSONObject) oneObjectEntity; + for (String columnName : columns) { + Object value = oneJsonObjectEntity.get(columnName); + ClickhouseDataType dataType = columnMeta.get(columnName); + try { + Class typeClass = dataType.getJavaType(); + Constructor constructor = typeClass.getConstructor(String.class); + + if (Objects.nonNull(value)) { + value = constructor.newInstance(value.toString()); + } + } catch (Exception e) { + e.printStackTrace(); + } + DefaultColumnWrapperContext context = new DefaultColumnWrapperContext<>(columns.indexOf(columnName), columnName, value, rowInstance); + wrapper.wrapColumn(context); + rowInstance = context.getRowInstance(); + } + if (!wrapper.completedWrapRow(rowInstance)) { + break; + } + if (rowInstance != null) { + sink.next(rowInstance); + } + } + wrapper.completedWrap(); + sink.complete(); + }); + }} + diff --git a/src/main/java/org/hswebframework/ezorm/rdb/supports/clickhouse/ClickhouseTableMetadataParser.java b/src/main/java/org/hswebframework/ezorm/rdb/supports/clickhouse/ClickhouseTableMetadataParser.java new file mode 100644 index 0000000..40e7197 --- /dev/null +++ 
package org.hswebframework.ezorm.rdb.supports.clickhouse;

import org.hswebframework.ezorm.rdb.executor.reactive.ReactiveSqlExecutor;
import org.hswebframework.ezorm.rdb.mapping.defaults.record.RecordResultWrapper;
import org.hswebframework.ezorm.rdb.metadata.RDBColumnMetadata;
import org.hswebframework.ezorm.rdb.metadata.RDBIndexMetadata;
import org.hswebframework.ezorm.rdb.metadata.RDBSchemaMetadata;
import org.hswebframework.ezorm.rdb.metadata.RDBTableMetadata;
import org.hswebframework.ezorm.rdb.metadata.parser.IndexMetadataParser;
import org.hswebframework.ezorm.rdb.supports.commons.RDBTableMetadataParser;
import org.hswebframework.utils.StringUtils;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.hswebframework.ezorm.rdb.executor.SqlRequests.template;
import static org.hswebframework.ezorm.rdb.executor.wrapper.ResultWrappers.singleMap;

/**
 * Table metadata parser for ClickHouse: reads columns from
 * {@code information_schema.columns} and table comments / existence from
 * {@code system.tables}.
 *
 * @author dengpengyu
 * @since 2023/9/4
 */
public class ClickhouseTableMetadataParser extends RDBTableMetadataParser {

    private static final String TABLE_META_SQL = String.join(" ",
            "select",
            "column_name as `name`,",
            "data_type as `data_type`,",
            "character_maximum_length as `data_length`,",
            "numeric_precision as `data_precision`,",
            "numeric_scale as `data_scale`,",
            "column_comment as `comment`,",
            "table_name as `table_name`,",
            "case is_nullable when 0 then 0 else 1 end ",
            "from information_schema.columns where table_schema=#{schema} and table_name like #{table}");

    private static final String TABLE_COMMENT_SQL = String.join(" ",
            "select ",
            "`comment`",
            ",name as `table_name`",
            "from system.tables where database=#{schema} and name like #{table}");

    private static final String ALL_TABLE_SQL =
            "select table_name as `name` from system.tables where database=#{schema}";

    private static final String TABLE_EXISTS_SQL =
            "select count() as `total` from system.tables where database=#{schema} and name=#{table}";

    public ClickhouseTableMetadataParser(RDBSchemaMetadata schema) {
        super(schema);
    }

    @Override
    protected String getTableMetaSql(String name) {
        return TABLE_META_SQL;
    }

    @Override
    protected String getTableCommentSql(String name) {
        return TABLE_COMMENT_SQL;
    }

    @Override
    protected String getAllTableSql() {
        return ALL_TABLE_SQL;
    }

    @Override
    public String getTableExistsSql() {
        return TABLE_EXISTS_SQL;
    }

    @Override
    public List<RDBTableMetadata> parseAll() {
        return super.fastParseAll();
    }

    @Override
    public Flux<RDBTableMetadata> parseAllReactive() {
        return super.fastParseAllReactive();
    }

    /**
     * Loads the metadata of a single table reactively: first checks existence,
     * then fetches columns, the table comment and any indexes in parallel.
     */
    @Override
    public Mono<RDBTableMetadata> parseByNameReactive(String name) {
        return tableExistsReactive(name)
                .filter(Boolean::booleanValue)
                .flatMap(ignore -> {
                    RDBTableMetadata tableMetadata = createTable(name);
                    tableMetadata.setName(name);
                    tableMetadata.setAlias(name);

                    Map<String, Object> args = new HashMap<>();
                    args.put("table", name);
                    args.put("schema", schema.getName());

                    ReactiveSqlExecutor sqlExecutor = getReactiveSqlExecutor();

                    // Column definitions.
                    Mono<List<RDBColumnMetadata>> columns = sqlExecutor
                            .select(template(getTableMetaSql(null), args), new RecordResultWrapper())
                            .map(record -> {
                                RDBColumnMetadata column = tableMetadata.newColumn();
                                applyColumnInfo(column, record);
                                // NOTE(review): this guard re-derives the camelCase alias
                                // only when an alias is already present — confirm the
                                // condition is not meant to be isNullOrEmpty(...).
                                if (!StringUtils.isNullOrEmpty(column.getAlias())) {
                                    column.setAlias(getAlias(column.getName()));
                                }
                                tableMetadata.addColumn(column);
                                return column;
                            })
                            .collectList();

                    // Table comment.
                    Mono<Map<String, Object>> comments = sqlExecutor
                            .select(template(getTableCommentSql(name), args), singleMap())
                            .doOnNext(comment -> tableMetadata.setComment(String.valueOf(comment.get("comment"))))
                            .singleOrEmpty();

                    // Indexes, when an index parser feature is registered.
                    Flux<RDBIndexMetadata> index = schema
                            .findFeature(IndexMetadataParser.ID)
                            .map(parser -> parser.parseTableIndexReactive(name))
                            .orElseGet(Flux::empty)
                            .doOnNext(tableMetadata::addIndex);

                    return Flux
                            .merge(columns, comments, index)
                            .then(Mono.just(tableMetadata));
                });
    }

    /** Converts a snake_case column name to camelCase (e.g. {@code user_id -> userId}). */
    private String getAlias(String name) {
        StringBuilder alias = new StringBuilder(name.length());
        boolean upperNext = false;
        for (char c : name.toCharArray()) {
            if (c == '_') {
                upperNext = true;
            } else if (upperNext) {
                alias.append(Character.toUpperCase(c));
                upperNext = false;
            } else {
                alias.append(Character.toLowerCase(c));
            }
        }
        return alias.toString();
    }
}
package org.hswebframework.ezorm.rdb.supports.clickhouse;

import lombok.SneakyThrows;
import org.hswebframework.ezorm.rdb.executor.reactive.ReactiveSqlExecutor;
import org.hswebframework.ezorm.rdb.executor.reactive.ReactiveSyncSqlExecutor;
import org.hswebframework.ezorm.rdb.mapping.EntityColumnMapping;
import org.hswebframework.ezorm.rdb.mapping.MappingFeatureType;
import org.hswebframework.ezorm.rdb.mapping.ReactiveRepository;
import org.hswebframework.ezorm.rdb.mapping.defaults.DefaultReactiveRepository;
import org.hswebframework.ezorm.rdb.mapping.jpa.JpaEntityTableMetadataParser;
import org.hswebframework.ezorm.rdb.mapping.wrapper.EntityResultWrapper;
import org.hswebframework.ezorm.rdb.metadata.RDBDatabaseMetadata;
import org.hswebframework.ezorm.rdb.metadata.RDBSchemaMetadata;
import org.hswebframework.ezorm.rdb.metadata.RDBTableMetadata;
import org.hswebframework.ezorm.rdb.metadata.dialect.Dialect;
import org.hswebframework.ezorm.rdb.operator.DatabaseOperator;
import org.hswebframework.ezorm.rdb.operator.DefaultDatabaseOperator;
import org.hswebframework.web.crud.configuration.ClickhouseProperties;

import java.util.function.Supplier;

/**
 * Assembles the ezorm metadata / operator stack for ClickHouse and builds
 * reactive repositories for entity classes.
 *
 * @author dengpengyu
 * @since 2023/9/21
 */
public interface Helper {

    RDBSchemaMetadata getRDBSchemaMetadata();

    Dialect getDialect();

    ReactiveSqlExecutor getReactiveSqlExecutor();

    ClickhouseProperties getClickhouseProperties();

    /**
     * Builds database metadata wired with both the reactive executor and the
     * HTTP-based synchronous executor.
     */
    default RDBDatabaseMetadata getRDBDatabaseMetadata() {
        RDBDatabaseMetadata database = new RDBDatabaseMetadata(getDialect());
        RDBSchemaMetadata schema = getRDBSchemaMetadata();
        ReactiveSqlExecutor sqlExecutor = getReactiveSqlExecutor();

        database.setCurrentSchema(schema);
        database.addSchema(schema);
        database.addFeature(sqlExecutor);
        database.addFeature(ClickhouseSyncSqlExecutor.of(getClickhouseProperties()));

        return database;
    }

    /**
     * Creates a reactive repository for {@code clazz} by parsing its JPA
     * annotations into table metadata. Note: no DDL is executed here — the
     * table is assumed to already exist in ClickHouse.
     */
    @SneakyThrows
    default ReactiveRepository createRepository(Class clazz) {
        RDBDatabaseMetadata database = getRDBDatabaseMetadata();
        DatabaseOperator operator = DefaultDatabaseOperator.of(database);

        JpaEntityTableMetadataParser parser = new JpaEntityTableMetadataParser();
        parser.setDatabaseMetadata(database);

        RDBTableMetadata table = parser
                .parseTableMetadata(clazz)
                .orElseThrow(NullPointerException::new);

        // Instantiates entities reflectively; @SneakyThrows hides the checked
        // exceptions of Class.newInstance().
        Supplier supplier = new Supplier() {
            @SneakyThrows
            @Override
            public Object get() {
                return clazz.newInstance();
            }
        };
        EntityResultWrapper wrapper = new EntityResultWrapper(supplier);
        wrapper.setMapping(table
                .getFeature(MappingFeatureType.columnPropertyMapping.createFeatureId(clazz))
                .orElseThrow(NullPointerException::new));

        return new ClickhouseReactiveDefaultRepository<>(operator, table, clazz, wrapper);
    }
}

// ---- src/main/java/org/hswebframework/ezorm/rdb/supports/clickhouse/sqlBuilder/ClickhouseDeleteSqlBuilder.java ----

package org.hswebframework.ezorm.rdb.supports.clickhouse.sqlBuilder;

import lombok.AllArgsConstructor;
import org.apache.commons.collections.CollectionUtils;
import org.hswebframework.ezorm.core.param.Term;
import org.hswebframework.ezorm.rdb.executor.SqlRequest;
import org.hswebframework.ezorm.rdb.metadata.RDBTableMetadata;
import org.hswebframework.ezorm.rdb.metadata.key.ForeignKeyMetadata;
import org.hswebframework.ezorm.rdb.operator.builder.fragments.*;
import org.hswebframework.ezorm.rdb.operator.builder.fragments.delete.DeleteSqlBuilder;
import org.hswebframework.ezorm.rdb.operator.builder.fragments.term.ForeignKeyTermFragmentBuilder;
import org.hswebframework.ezorm.rdb.operator.dml.delete.DeleteOperatorParameter;

import java.util.Collections;
import java.util.LinkedList;
import java.util.List;

/**
 * DELETE builder for ClickHouse. ClickHouse has no classic DELETE statement,
 * so deletions are emitted as {@code ALTER TABLE ... DELETE WHERE ...}
 * mutations. Unconditional deletes are rejected.
 *
 * @author dengpengyu
 * @since 2023/9/6
 */
@AllArgsConstructor(staticName = "of")
@SuppressWarnings("all")
public class ClickhouseDeleteSqlBuilder extends AbstractTermsFragmentBuilder<DeleteOperatorParameter>
        implements DeleteSqlBuilder {

    private RDBTableMetadata table;

    @Override
    public SqlRequest build(DeleteOperatorParameter parameter) {
        if (CollectionUtils.isEmpty(parameter.getWhere())) {
            throw new UnsupportedOperationException("Unsupported No Conditions delete");
        }

        PrepareSqlFragments sql = PrepareSqlFragments.of();
        sql.addSql("ALTER TABLE", table.getFullName(), "DELETE WHERE");

        SqlFragments whereClause = createTermFragments(parameter, parameter.getWhere());
        if (whereClause.isEmpty()) {
            // All terms resolved to nothing — refuse to delete the whole table.
            throw new UnsupportedOperationException("Unsupported No Conditions delete");
        }
        sql.addFragments(whereClause);

        return sql.toRequest();
    }

    @Override
    protected SqlFragments createTermFragments(DeleteOperatorParameter parameter, Term term) {
        String columnName = term.getColumn();
        if (columnName == null) {
            return EmptySqlFragments.INSTANCE;
        }

        // "table.column" / "foreignKey.column" qualified references.
        if (columnName.contains(".")) {
            String[] parts = columnName.split("[.]");
            if (table.equalsNameOrAlias(parts[0])) {
                columnName = parts[1];
            } else {
                return table.getForeignKey(parts[0])
                        .flatMap(key -> table.findFeature(ForeignKeyTermFragmentBuilder.ID)
                                .map(builder -> builder.createFragments(table.getName(), key,
                                                                        createForeignKeyTerm(key, term))))
                        .orElse(EmptySqlFragments.INSTANCE);
            }
        }

        String finalColumnName = columnName;
        return table
                .getColumn(finalColumnName)
                .flatMap(column -> column
                        .findFeature(TermFragmentBuilder.createFeatureId(term.getTermType()))
                        .map(termFragment -> termFragment.createFragments(column.getQuoteName(), column, term)))
                .orElse(EmptySqlFragments.INSTANCE);
    }

    protected List<Term> createForeignKeyTerm(ForeignKeyMetadata keyMetadata, Term term) {
        Term copy = term.clone();
        // Nested terms under a foreign-key column are treated as conditions on
        // the related table, so strip them from the original term.
        term.setTerms(new LinkedList<>());
        return Collections.singletonList(copy);
    }
}
package org.hswebframework.ezorm.rdb.supports.clickhouse.sqlBuilder;

import lombok.AllArgsConstructor;
import lombok.Getter;
import org.apache.commons.collections.CollectionUtils;
import org.hswebframework.ezorm.core.param.Term;
import org.hswebframework.ezorm.rdb.executor.EmptySqlRequest;
import org.hswebframework.ezorm.rdb.executor.SqlRequest;
import org.hswebframework.ezorm.rdb.metadata.RDBColumnMetadata;
import org.hswebframework.ezorm.rdb.metadata.RDBTableMetadata;
import org.hswebframework.ezorm.rdb.metadata.key.ForeignKeyMetadata;
import org.hswebframework.ezorm.rdb.operator.builder.fragments.*;
import org.hswebframework.ezorm.rdb.operator.builder.fragments.term.ForeignKeyTermFragmentBuilder;
import org.hswebframework.ezorm.rdb.operator.builder.fragments.update.UpdateSqlBuilder;
import org.hswebframework.ezorm.rdb.operator.dml.update.UpdateColumn;
import org.hswebframework.ezorm.rdb.operator.dml.update.UpdateOperatorParameter;

import java.util.Collections;
import java.util.LinkedList;
import java.util.List;

import static java.util.Optional.ofNullable;
import static org.hswebframework.ezorm.rdb.operator.builder.fragments.function.FunctionFragmentBuilder.createFeatureId;

/**
 * UPDATE builder for ClickHouse. ClickHouse has no classic UPDATE statement,
 * so updates are emitted as {@code ALTER TABLE ... UPDATE col=? where ...}
 * mutations. Unconditional updates are rejected.
 *
 * @author dengpengyu
 * @since 2023/9/6
 */
@AllArgsConstructor(staticName = "of")
@SuppressWarnings("all")
public class ClickhouseUpdateSqlBuilder extends AbstractTermsFragmentBuilder<UpdateOperatorParameter>
        implements UpdateSqlBuilder {

    @Getter
    private RDBTableMetadata table;

    @Override
    public SqlRequest build(UpdateOperatorParameter parameter) {
        if (CollectionUtils.isEmpty(parameter.getColumns())) {
            return EmptySqlRequest.INSTANCE;
        }
        if (CollectionUtils.isEmpty(parameter.getWhere())) {
            throw new UnsupportedOperationException("unsupported no conditions update");
        }

        PrepareSqlFragments sql = PrepareSqlFragments.of();
        sql.addSql("ALTER TABLE", table.getFullName(), "UPDATE");

        int setCount = 0;
        for (UpdateColumn column : parameter.getColumns()) {
            SqlFragments setFragment = buildSetFragment(column);
            if (setFragment.isNotEmpty()) {
                if (setCount++ != 0) {
                    sql.addSql(",");
                }
                sql.addFragments(setFragment);
            }
        }
        if (setCount == 0) {
            throw new UnsupportedOperationException("No columns are updated");
        }

        sql.addSql("where");
        SqlFragments whereClause = createTermFragments(parameter, parameter.getWhere());
        if (whereClause.isEmpty()) {
            throw new UnsupportedOperationException("Unsupported No Conditions update");
        }
        sql.addFragments(whereClause);

        return sql.toRequest();
    }

    /** Resolves a single "col = value" fragment; empty when not updatable or value is null. */
    private SqlFragments buildSetFragment(UpdateColumn column) {
        return table.getColumn(column.getColumn())
                    .filter(RDBColumnMetadata::isUpdatable)
                    .map(columnMetadata -> buildSetFragment(columnMetadata, column))
                    .orElse(EmptySqlFragments.INSTANCE);
    }

    /** Builds the assignment for a resolved column, honoring native SQL and functions. */
    private SqlFragments buildSetFragment(RDBColumnMetadata columnMetadata, UpdateColumn column) {
        Object value = column.getValue();
        if (value == null) {
            return EmptySqlFragments.INSTANCE;
        }
        // The update column itself is raw SQL: emit it verbatim.
        if (column instanceof NativeSql) {
            return PrepareSqlFragments.of()
                                      .addSql(((NativeSql) column).getSql())
                                      .addParameter(((NativeSql) column).getParameters());
        }
        // The value is raw SQL: emit "col = <sql>".
        if (value instanceof NativeSql) {
            return PrepareSqlFragments.of()
                                      .addSql(columnMetadata.getQuoteName(), "=")
                                      .addSql(((NativeSql) value).getSql())
                                      .addParameter(((NativeSql) value).getParameters());
        }
        // Plain value: "col = ?", optionally routed through a function feature.
        PrepareSqlFragments fragment = PrepareSqlFragments.of();
        fragment.addSql(columnMetadata.getQuoteName(), "=");
        fragment.addFragments(ofNullable(column.getFunction())
                .flatMap(function -> columnMetadata.findFeature(createFeatureId(function)))
                .map(builder -> builder.create(columnMetadata.getName(), columnMetadata, column))
                .orElseGet(() -> PrepareSqlFragments.of()
                                                    .addSql("?")
                                                    .addParameter(columnMetadata.encode(value))));
        return fragment;
    }

    @Override
    protected SqlFragments createTermFragments(UpdateOperatorParameter parameter, Term term) {
        String columnName = term.getColumn();
        if (columnName == null) {
            return EmptySqlFragments.INSTANCE;
        }

        // "table.column" / "foreignKey.column" qualified references.
        if (columnName.contains(".")) {
            String[] parts = columnName.split("[.]");
            if (table.equalsNameOrAlias(parts[0])) {
                columnName = parts[1];
            } else {
                return table.getForeignKey(parts[0])
                            .flatMap(key -> key.getSource()
                                               .findFeature(ForeignKeyTermFragmentBuilder.ID)
                                               .map(builder -> builder.createFragments(table.getName(), key,
                                                                                       createForeignKeyTerm(key, term))))
                            .orElse(EmptySqlFragments.INSTANCE);
            }
        }

        return table
                .getColumn(columnName)
                .flatMap(column -> column
                        .findFeature(TermFragmentBuilder.createFeatureId(term.getTermType()))
                        .map(termFragment -> termFragment.createFragments(column.getQuoteName(), column, term)))
                .orElse(EmptySqlFragments.INSTANCE);
    }

    protected List<Term> createForeignKeyTerm(ForeignKeyMetadata keyMetadata, Term term) {
        Term copy = term.clone();
        // Nested terms under a foreign-key column are treated as conditions on
        // the related table, so strip them from the original term.
        term.setTerms(new LinkedList<>());
        return Collections.singletonList(copy);
    }
}
package org.hswebframework.service;

import org.hswebframework.ezorm.rdb.mapping.ReactiveRepository;
import org.hswebframework.ezorm.rdb.supports.clickhouse.ClickhouseHelper;
import org.hswebframework.web.crud.service.ReactiveCrudService;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * Base class for ClickHouse-backed CRUD services. The repository is built
 * lazily on first access from the entity class supplied by the subclass.
 *
 * @author dengpengyu
 * @since 2023/9/20
 */
public abstract class ClickhouseReactiveCrudService implements ReactiveCrudService {

    private ReactiveRepository repository;

    @Autowired
    private ClickhouseHelper clickhouseHelper;

    @Override
    public ReactiveRepository getRepository() {
        // NOTE(review): lazy init without synchronization — concurrent first
        // calls may build the repository more than once; confirm acceptable.
        if (repository == null) {
            repository = clickhouseHelper.createRepository(getEntityClass());
        }
        return repository;
    }

    /** Entity class backing this service; used to derive the repository. */
    public abstract Class getEntityClass();
}

// ---- src/main/java/org/hswebframework/web/crud/configuration/ClickhouseProperties.java ----

package org.hswebframework.web.crud.configuration;

import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;

/**
 * Connection settings bound from the {@code spring.clickhouse.*} properties.
 *
 * @author dengpengyu
 * @since 2023/9/13
 */
@Data
@ConfigurationProperties(prefix = "spring.clickhouse")
@Component
public class ClickhouseProperties {

    private String url;

    private String database;

    private String username;

    private String password;
}

// ---- src/main/java/org/hswebframework/web/crud/configuration/EasyormConfigurationBeanDefinitionRegistryPostProcessor.java ----

package org.hswebframework.web.crud.configuration;

import org.hswebframework.ezorm.rdb.mapping.defaults.DefaultReactiveRepository;
import org.hswebframework.utils.StringUtils;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.BeanDefinitionRegistryPostProcessor;
import org.springframework.beans.factory.support.DefaultListableBeanFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Component;

/**
 * Removes the default easyorm reactive repository bean definitions when no
 * {@code easyorm.dialect} is configured, so the ClickHouse repositories can
 * take their place.
 *
 * @author zhong
 * @since 2023/10/8
 */
@Component
public class EasyormConfigurationBeanDefinitionRegistryPostProcessor
        implements BeanDefinitionRegistryPostProcessor, ApplicationContextAware {

    private ApplicationContext applicationContext;

    @Override
    public void postProcessBeanDefinitionRegistry(BeanDefinitionRegistry registry) throws BeansException {
        Environment environment = applicationContext.getEnvironment();
        if (StringUtils.isNullOrEmpty(environment.getProperty("easyorm.dialect"))) {
            String[] beanNames = ((DefaultListableBeanFactory) registry)
                    .getBeanNamesForType(DefaultReactiveRepository.class);
            for (String beanName : beanNames) {
                registry.removeBeanDefinition(beanName);
            }
        }
    }

    @Override
    public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException {
        // No bean-factory level processing required.
    }

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        this.applicationContext = applicationContext;
    }
}

// ---- src/main/resources/META-INF/spring.factories ----
// # Auto Configure
// org.springframework.boot.autoconfigure.EnableAutoConfiguration=\
//   org.hswebframework.ezorm.rdb.supports.clickhouse.ClickhouseHelper,\
//   org.hswebframework.web.crud.configuration.EasyormConfigurationBeanDefinitionRegistryPostProcessor