Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
27 commits
Select commit Hold shift + click to select a range
f0645b1
added basic dynamic support for rowbinary reader
chernser Feb 3, 2025
96102c9
implemented reading in rowbinary format for json and dynamic
chernser Feb 4, 2025
dc82be7
Merge branch 'main' into v2_support_dynamic_and_json
chernser Feb 5, 2025
4f8de9d
fixed tests for ch versions
chernser Feb 5, 2025
2ae4d27
implemented dynamic basic types
chernser Feb 8, 2025
f18ce46
Merge branch 'main' into v2_support_dynamic_and_json
chernser Feb 8, 2025
af28fca
implemented interval and partially decimal
chernser Feb 8, 2025
d2b6d59
almost all types are working
chernser Feb 10, 2025
9c3a6ed
fixed array handling when Dynamic
chernser Feb 10, 2025
beae9f4
fixed maps and decimals
chernser Feb 10, 2025
35f0b31
Fixed dynamic primitives. Dates left
chernser Feb 11, 2025
c6335d7
fixed enums for different releases
chernser Feb 11, 2025
629c0be
Merge branch 'main' into v2_support_dynamic_and_json
chernser Feb 11, 2025
dbee722
fixed arrays
chernser Feb 12, 2025
455b367
enabled JDBC tests for variant and dynamic
chernser Feb 12, 2025
de27d86
fixed dates, added test for POJO read
chernser Feb 13, 2025
48a0b09
Merge branch 'main' into v2_support_dynamic_and_json
chernser Feb 13, 2025
e969608
fixed some tests
chernser Feb 13, 2025
7741b2f
fixed more tests
chernser Feb 13, 2025
170674e
Merge branch 'main' into v2_support_dynamic_and_json
chernser Feb 13, 2025
b93815a
fixed proper settings for cloud
chernser Feb 13, 2025
1b81ce5
one more test fix
chernser Feb 13, 2025
eb68c76
Fixed missing isSecure for cloud tests
chernser Feb 13, 2025
775d36d
Merge branch 'main' into v2_support_dynamic_and_json
chernser Feb 13, 2025
296d676
addressed some issues
chernser Feb 13, 2025
17b8770
added interval serialization/deserialization
chernser Feb 14, 2025
edc87a1
fixed Period serialization
chernser Feb 14, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 4 additions & 4 deletions .github/workflows/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -145,7 +145,7 @@ jobs:
matrix:
# most recent LTS releases as well as latest stable builds
# https://github.com/ClickHouse/ClickHouse/pulls?q=is%3Aopen+is%3Apr+label%3Arelease
clickhouse: ["23.8", "24.3", "24.8", "latest"]
clickhouse: ["24.3", "24.8", "latest"]
project: ["clickhouse-http-client", "client-v2"]
fail-fast: false
timeout-minutes: 15
Expand Down Expand Up @@ -268,7 +268,7 @@ jobs:
needs: compile
strategy:
matrix:
clickhouse: ["23.8", "24.3", "24.6", "latest"]
clickhouse: ["24.3", "24.8", "latest"]
# here http, http_client and apache_http_client represent different value of http_connection_provider
# protocol: ["http", "http_client", "apache_http_client"]
protocol: ["apache_http_client"]
Expand Down Expand Up @@ -321,7 +321,7 @@ jobs:
uses: actions/upload-artifact@v4
if: failure()
with:
name: result ${{ github.job }}
name: result ${{ github.job }}_${{ matrix.project }}_${{ matrix.clickhouse }}
path: |
**/target/failsafe-reports
**/target/surefire-reports
Expand All @@ -331,7 +331,7 @@ jobs:
needs: compile
strategy:
matrix:
clickhouse: ["23.8", "24.3", "24.8", "latest"]
clickhouse: ["24.3", "24.8", "latest"]
# grpc is not fully supported, and http_client and apache_http_client do not work in CI environment(due to limited threads?)
protocol: ["http"]
r2dbc: ["1.0.0.RELEASE", "0.9.1.RELEASE"]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -358,6 +358,7 @@ public static void beforeSuite() {
@AfterSuite(groups = {"integration"})
public static void afterSuite() {
if (clickhouseContainer != null) {
// clickhouseContainer.copyFileFromContainer("/var/log/clickhouse-server/clickhouse-server.log", "server-container.log");
clickhouseContainer.stop();
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -640,8 +640,13 @@ public static List<ClickHouseColumn> parse(String args) {
return Collections.unmodifiableList(c);
}

private ClickHouseColumn(ClickHouseDataType dataType, String columnName, String originalTypeName, boolean nullable,
/**
 * Creates a column descriptor without enum constants.
 * Convenience overload that delegates to the full constructor, passing
 * {@code ClickHouseEnum.EMPTY} for the enum-constants argument.
 *
 * @param dataType         the ClickHouse data type of the column (must not be null)
 * @param columnName       the column name
 * @param originalTypeName the original type string as reported by the server
 * @param nullable         whether the column is Nullable(...)
 * @param lowCardinality   whether the column is LowCardinality(...)
 * @param parameters       type parameters (e.g. precision/scale), if any
 * @param nestedColumns    nested columns for composite types (Array, Map, Tuple, ...)
 */
public ClickHouseColumn(ClickHouseDataType dataType, String columnName, String originalTypeName, boolean nullable,
boolean lowCardinality, List<String> parameters, List<ClickHouseColumn> nestedColumns) {
this(dataType, columnName, originalTypeName, nullable, lowCardinality, parameters, nestedColumns, ClickHouseEnum.EMPTY);
}

public ClickHouseColumn(ClickHouseDataType dataType, String columnName, String originalTypeName, boolean nullable,
boolean lowCardinality, List<String> parameters, List<ClickHouseColumn> nestedColumns, ClickHouseEnum enumConstants) {
this.aggFuncType = null;
this.dataType = ClickHouseChecker.nonNull(dataType, "dataType");

Expand Down Expand Up @@ -671,6 +676,7 @@ private ClickHouseColumn(ClickHouseDataType dataType, String columnName, String

this.fixedByteLength = false;
this.estimatedByteLength = 0;
this.enumConstants = enumConstants;
}

/**
Expand Down

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ protected ClickHouseEnum(Collection<String> params) {
}
}

protected ClickHouseEnum(String[] names, int[] values) {
public ClickHouseEnum(String[] names, int[] values) {
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

There is no need to restrict the constructor of such a simple class to protected access.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Would it be more efficient to use a map here? I see a lot of linear-scan operations in this class.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The current approach is more memory-efficient. We can introduce a map and measure the performance difference.

if (names == null || values == null) {
throw new IllegalArgumentException("Non-null names and values are required");
} else if (names.length != values.length) {
Expand Down Expand Up @@ -93,6 +93,16 @@ public String name(int value) {
throw new IllegalArgumentException("Unknown enum value: " + value);
}

/**
 * Looks up the name mapped to the given enum value.
 * Unlike {@code name(int)}, this variant does not throw when the value
 * is unknown; it returns {@code null} instead.
 *
 * @param value raw enum value to look up
 * @return the matching name, or {@code null} if no entry has this value
 */
public String nameNullable(int value) {
    // Linear scan over the parallel names/values arrays; kept array-based
    // for memory efficiency (see review discussion about using a map).
    int idx = 0;
    while (idx < size) {
        if (values[idx] == value) {
            return names[idx];
        }
        idx++;
    }
    return null;
}

public int value(String name) {
for (int i = 0; i < size; i++) {
if (names[i].equals(name)) {
Expand Down Expand Up @@ -135,4 +145,16 @@ public String toSqlException() {
}
return builder.toString();
}

/**
 * Returns the number of name/value pairs defined by this enum.
 *
 * @return the count of enum constants
 */
public int size() {
return size;
}

/**
 * Returns the enum constant names, parallel to {@link #getValues()}.
 *
 * @return a copy of the enum names; mutating it does not affect this enum
 */
public String[] getNames() {
    // Defensive copy: returning the internal array directly would let
    // callers corrupt this enum's state.
    return names.clone();
}

/**
 * Returns the enum constant values, parallel to {@link #getNames()}.
 *
 * @return a copy of the enum values; mutating it does not affect this enum
 */
public int[] getValues() {
    // Defensive copy: returning the internal array directly would let
    // callers corrupt this enum's state.
    return values.clone();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
import java.net.Inet4Address;
import java.net.Inet6Address;
import java.time.*;
import java.time.temporal.TemporalAmount;
import java.util.List;
import java.util.Map;
import java.util.UUID;
Expand Down Expand Up @@ -540,4 +541,8 @@ public interface ClickHouseBinaryFormatReader extends AutoCloseable {
ClickHouseBitmap getClickHouseBitmap(String colName);

ClickHouseBitmap getClickHouseBitmap(int index);

TemporalAmount getTemporalAmount(int index);

TemporalAmount getTemporalAmount(String colName);
}
Original file line number Diff line number Diff line change
Expand Up @@ -207,6 +207,8 @@ public static boolean writeValuePreamble(OutputStream out, boolean defaultsSuppo
return false;//And we're done
} else if (dataType == ClickHouseDataType.Array) {//If the column is an array
SerializerUtils.writeNonNull(out);//Then we send nonNull
} else if (dataType == ClickHouseDataType.Dynamic) {
// do nothing
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Could we add an explanation of why nothing needs to be written here?

} else {
throw new IllegalArgumentException(String.format("An attempt to write null into not nullable column '%s'", column));
}
Expand All @@ -221,6 +223,8 @@ public static boolean writeValuePreamble(OutputStream out, boolean defaultsSuppo
} else if (value == null) {
if (dataType == ClickHouseDataType.Array) {
SerializerUtils.writeNonNull(out);
} else if (dataType == ClickHouseDataType.Dynamic) {
// do nothing
} else {
throw new IllegalArgumentException(String.format("An attempt to write null into not nullable column '%s'", column));
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@
import java.time.*;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;
import java.time.temporal.TemporalAmount;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
Expand Down Expand Up @@ -247,6 +248,7 @@ protected void setSchema(TableSchema schema) {
case Enum8:
case Enum16:
case Variant:
case Dynamic:
this.convertions[i] = NumberConverter.NUMBER_CONVERTERS;
break;
default:
Expand Down Expand Up @@ -285,9 +287,11 @@ public static String readAsString(Object value, ClickHouseColumn column) {
return zdt.format(com.clickhouse.client.api.DataTypeUtils.DATE_FORMATTER);
}
return value.toString();
} else if (value instanceof BinaryStreamReader.EnumValue) {
return ((BinaryStreamReader.EnumValue)value).name;
} else if (value instanceof Number ) {
ClickHouseDataType dataType = column.getDataType();
int num = ((Number)value).intValue();
int num = ((Number) value).intValue();
if (column.getDataType() == ClickHouseDataType.Variant) {
for (ClickHouseColumn c : column.getNestedColumns()) {
// TODO: will work only if single enum listed as variant
Expand Down Expand Up @@ -411,38 +415,13 @@ public ZonedDateTime getZonedDateTime(String colName) {

@Override
public Duration getDuration(String colName) {
int colIndex = schema.nameToIndex(colName);
ClickHouseColumn column = schema.getColumns().get(colIndex);
BigInteger value = readValue(colName);
try {
switch (column.getDataType()) {
case IntervalYear:
return Duration.of(value.longValue(), java.time.temporal.ChronoUnit.YEARS);
case IntervalQuarter:
return Duration.of(value.longValue() * 3, java.time.temporal.ChronoUnit.MONTHS);
case IntervalMonth:
return Duration.of(value.longValue(), java.time.temporal.ChronoUnit.MONTHS);
case IntervalWeek:
return Duration.of(value.longValue(), ChronoUnit.WEEKS);
case IntervalDay:
return Duration.of(value.longValue(), java.time.temporal.ChronoUnit.DAYS);
case IntervalHour:
return Duration.of(value.longValue(), java.time.temporal.ChronoUnit.HOURS);
case IntervalMinute:
return Duration.of(value.longValue(), java.time.temporal.ChronoUnit.MINUTES);
case IntervalSecond:
return Duration.of(value.longValue(), java.time.temporal.ChronoUnit.SECONDS);
case IntervalMicrosecond:
return Duration.of(value.longValue(), java.time.temporal.ChronoUnit.MICROS);
case IntervalMillisecond:
return Duration.of(value.longValue(), java.time.temporal.ChronoUnit.MILLIS);
case IntervalNanosecond:
return Duration.of(value.longValue(), java.time.temporal.ChronoUnit.NANOS);
}
} catch (ArithmeticException e) {
throw new ClientException("Stored value is bigger then Long.MAX_VALUE and it cannot be converted to Duration without information loss", e);
}
throw new ClientException("Column of type " + column.getDataType() + " cannot be converted to Duration");
TemporalAmount temporalAmount = getTemporalAmount(colName);
return Duration.from(temporalAmount);
}

/**
 * Reads the column value as a {@link TemporalAmount}.
 * NOTE(review): assumes interval columns are deserialized into a
 * TemporalAmount (Duration or Period) by the binary reader; readValue
 * performs an unchecked cast here -- confirm for all Interval* types.
 *
 * @param colName name of the column to read
 * @return the stored value as a TemporalAmount
 */
@Override
public TemporalAmount getTemporalAmount(String colName) {
return readValue(colName);
}

@Override
Expand Down Expand Up @@ -603,7 +582,12 @@ public ZonedDateTime getZonedDateTime(int index) {

@Override
public Duration getDuration(int index) {
return readValue(index);
return getDuration(schema.columnIndexToName(index));
}

/**
 * Reads the column at the given index as a {@link TemporalAmount}.
 * Delegates to the name-based overload after resolving the index
 * through the schema.
 *
 * @param index column index
 * @return the stored value as a TemporalAmount
 */
@Override
public TemporalAmount getTemporalAmount(int index) {
return getTemporalAmount(schema.columnIndexToName(index));
}

@Override
Expand Down Expand Up @@ -688,22 +672,24 @@ public Object[] getTuple(String colName) {

@Override
public byte getEnum8(String colName) {
return readValue(colName);
BinaryStreamReader.EnumValue enumValue = readValue(colName);
return enumValue.byteValue();
}

@Override
public byte getEnum8(int index) {
return readValue(index);
return getEnum8(schema.columnIndexToName(index));
}

@Override
public short getEnum16(String colName) {
return readValue(colName);
BinaryStreamReader.EnumValue enumValue = readValue(colName);
return enumValue.shortValue();
}

@Override
public short getEnum16(int index) {
return readValue(index);
return getEnum16(schema.columnIndexToName(index));
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
import java.net.Inet4Address;
import java.net.Inet6Address;
import java.time.*;
import java.time.temporal.TemporalAmount;
import java.util.List;
import java.util.Map;
import java.util.UUID;
Expand Down Expand Up @@ -89,6 +90,11 @@ public Duration getDuration(String colName) {
return reader.getDuration(colName);
}

/**
 * Returns the column value as a {@link TemporalAmount}.
 * Pure delegation to the wrapped binary format reader.
 *
 * @param colName name of the column to read
 * @return the stored value as a TemporalAmount
 */
@Override
public TemporalAmount getTemporalAmount(String colName) {
return reader.getTemporalAmount(colName);
}

@Override
public Inet4Address getInet4Address(String colName) {
return reader.getInet4Address(colName);
Expand Down Expand Up @@ -234,6 +240,11 @@ public Duration getDuration(int index) {
return reader.getDuration(index);
}

/**
 * Returns the column value at the given index as a {@link TemporalAmount}.
 * Pure delegation to the wrapped binary format reader.
 *
 * @param index column index
 * @return the stored value as a TemporalAmount
 */
@Override
public TemporalAmount getTemporalAmount(int index) {
return reader.getTemporalAmount(index);
}

@Override
public Inet4Address getInet4Address(int index) {
return reader.getInet4Address(index);
Expand Down
Loading
Loading