
SNOW-1553624 - structured types bindings test all types #1841

Draft
wants to merge 7 commits into base: master
@@ -85,9 +85,14 @@ public static BindingParameterMetadata buildBindingSchemaForType(int baseType, b
       case Types.DATE:
         return FieldSchemaCreator.buildSchemaTypeAndNameOnly(name, "date", Optional.empty());
       case Types.TIMESTAMP:
+        return FieldSchemaCreator.buildSchemaWithScaleAndPrecision(
+            name, "timestamp", 9, 0, Optional.empty());
       case Types.TIME:
         return FieldSchemaCreator.buildSchemaWithScaleAndPrecision(
-            name, "timestamp", 9, 0, Optional.empty());
+            name, "time", 9, 0, Optional.empty());
+      case Types.BINARY:
+        return FieldSchemaCreator.buildSchemaForBytesType(
+            name, Optional.empty());
       default:
         logger.error("Could not create schema for type : " + baseType);
         throw new SQLException("Could not create schema for type : " + baseType);
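Editor's note: before this hunk, Types.TIME fell through to the TIMESTAMP branch, so TIME bindings were described as "timestamp"; the change gives TIME its own "time" schema and adds a BINARY branch. A minimal sketch of how the fix could be exercised in a test, assuming the truncated second parameter of buildBindingSchemaForType is a nullable flag, that BindingParameterMetadata exposes a getType() accessor, and that the import paths match the package layout seen elsewhere in this PR:

import java.sql.SQLException;
import java.sql.Types;
import net.snowflake.client.core.FieldSchemaCreator;
import net.snowflake.client.jdbc.BindingParameterMetadata;

public class TimeBindingSchemaSketch {
  public static void main(String[] args) throws SQLException {
    // TIME should now be described as "time" rather than "timestamp".
    BindingParameterMetadata timeSchema =
        FieldSchemaCreator.buildBindingSchemaForType(Types.TIME, false);
    System.out.println(timeSchema.getType()); // expected: time

    // BINARY is newly supported instead of hitting the error default.
    BindingParameterMetadata binarySchema =
        FieldSchemaCreator.buildBindingSchemaForType(Types.BINARY, false);
    System.out.println(binarySchema.getType()); // expected: binary
  }
}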
198 changes: 105 additions & 93 deletions src/main/java/net/snowflake/client/core/SFBaseResultSet.java
@@ -288,99 +288,111 @@ protected SfSqlArray getJsonArray(String obj, int columnIndex) throws SFExceptio
       int columnType = ColumnTypeHelper.getColumnType(columnSubType, session);
       int scale = fieldMetadata.getScale();
 
-      ArrayNode arrayNode = (ArrayNode) OBJECT_MAPPER.readTree(obj);
-      Iterator<JsonNode> nodeElements = arrayNode.elements();
-
-      switch (columnType) {
-        case Types.INTEGER:
-          return new SfSqlArray(
-              columnSubType,
-              getStream(nodeElements, getConverters().integerConverter(columnType))
-                  .toArray(Integer[]::new));
-        case Types.SMALLINT:
-          return new SfSqlArray(
-              columnSubType,
-              getStream(nodeElements, getConverters().smallIntConverter(columnType))
-                  .toArray(Short[]::new));
-        case Types.TINYINT:
-          return new SfSqlArray(
-              columnSubType,
-              getStream(nodeElements, getConverters().tinyIntConverter(columnType))
-                  .toArray(Byte[]::new));
-        case Types.BIGINT:
-          return new SfSqlArray(
-              columnSubType,
-              getStream(nodeElements, getConverters().bigIntConverter(columnType))
-                  .toArray(Long[]::new));
-        case Types.DECIMAL:
-        case Types.NUMERIC:
-          return new SfSqlArray(
-              columnSubType,
-              convertToFixedArray(
-                  getStream(nodeElements, getConverters().bigDecimalConverter(columnType))));
-        case Types.CHAR:
-        case Types.VARCHAR:
-        case Types.LONGNVARCHAR:
-          return new SfSqlArray(
-              columnSubType,
-              getStream(
-                      nodeElements,
-                      getConverters().varcharConverter(columnType, columnSubType, scale))
-                  .toArray(String[]::new));
-        case Types.BINARY:
-          return new SfSqlArray(
-              columnSubType,
-              getStream(nodeElements, getConverters().bytesConverter(columnType, scale))
-                  .toArray(Byte[][]::new));
-        case Types.FLOAT:
-        case Types.REAL:
-          return new SfSqlArray(
-              columnSubType,
-              getStream(nodeElements, getConverters().floatConverter(columnType))
-                  .toArray(Float[]::new));
-        case Types.DOUBLE:
-          return new SfSqlArray(
-              columnSubType,
-              getStream(nodeElements, getConverters().doubleConverter(columnType))
-                  .toArray(Double[]::new));
-        case Types.DATE:
-          return new SfSqlArray(
-              columnSubType,
-              getStream(nodeElements, getConverters().dateStringConverter(session))
-                  .toArray(Date[]::new));
-        case Types.TIME:
-          return new SfSqlArray(
-              columnSubType,
-              getStream(nodeElements, getConverters().timeFromStringConverter(session))
-                  .toArray(Time[]::new));
-        case Types.TIMESTAMP:
-          return new SfSqlArray(
-              columnSubType,
-              getStream(
-                      nodeElements,
-                      getConverters()
-                          .timestampFromStringConverter(
-                              columnSubType, columnType, scale, session, null, sessionTimeZone))
-                  .toArray(Timestamp[]::new));
-        case Types.BOOLEAN:
-          return new SfSqlArray(
-              columnSubType,
-              getStream(nodeElements, getConverters().booleanConverter(columnType))
-                  .toArray(Boolean[]::new));
-        case Types.STRUCT:
-          return new SfSqlArray(
-              columnSubType,
-              getStream(nodeElements, getConverters().structConverter(OBJECT_MAPPER))
-                  .toArray(Map[]::new));
-        case Types.ARRAY:
-          return new SfSqlArray(
-              columnSubType,
-              getStream(nodeElements, getConverters().arrayConverter(OBJECT_MAPPER))
-                  .toArray(Map[][]::new));
-        default:
-          throw new SFException(
-              ErrorCode.FEATURE_UNSUPPORTED,
-              "Can't construct array for data type: " + columnSubType);
+      JsonNode data = OBJECT_MAPPER.readTree(obj);
+
+      if (data.isNull()) {
+        return null;
+      }
+
+      if (data.isArray()) {
+        ArrayNode arrayNode = (ArrayNode) OBJECT_MAPPER.readTree(obj);
+        Iterator<JsonNode> nodeElements = arrayNode.elements();
+
+        switch (columnType) {
+          case Types.INTEGER:
+            return new SfSqlArray(
+                columnSubType,
+                getStream(nodeElements, getConverters().integerConverter(columnType))
+                    .toArray(Integer[]::new));
+          case Types.SMALLINT:
+            return new SfSqlArray(
+                columnSubType,
+                getStream(nodeElements, getConverters().smallIntConverter(columnType))
+                    .toArray(Short[]::new));
+          case Types.TINYINT:
+            return new SfSqlArray(
+                columnSubType,
+                getStream(nodeElements, getConverters().tinyIntConverter(columnType))
+                    .toArray(Byte[]::new));
+          case Types.BIGINT:
+            return new SfSqlArray(
+                columnSubType,
+                getStream(nodeElements, getConverters().bigIntConverter(columnType))
+                    .toArray(Long[]::new));
+          case Types.DECIMAL:
+          case Types.NUMERIC:
+            return new SfSqlArray(
+                columnSubType,
+                convertToFixedArray(
+                    getStream(nodeElements, getConverters().bigDecimalConverter(columnType))));
+          case Types.CHAR:
+          case Types.VARCHAR:
+          case Types.LONGNVARCHAR:
+            return new SfSqlArray(
+                columnSubType,
+                getStream(
+                        nodeElements,
+                        getConverters().varcharConverter(columnType, columnSubType, scale))
+                    .toArray(String[]::new));
+          case Types.BINARY:
+            return new SfSqlArray(
+                columnSubType,
+                getStream(nodeElements, getConverters().bytesConverter(columnType, scale))
+                    .toArray(Byte[][]::new));
+          case Types.FLOAT:
+          case Types.REAL:
+            return new SfSqlArray(
+                columnSubType,
+                getStream(nodeElements, getConverters().floatConverter(columnType))
+                    .toArray(Float[]::new));
+          case Types.DOUBLE:
+            return new SfSqlArray(
+                columnSubType,
+                getStream(nodeElements, getConverters().doubleConverter(columnType))
+                    .toArray(Double[]::new));
+          case Types.DATE:
+            return new SfSqlArray(
+                columnSubType,
+                getStream(nodeElements, getConverters().dateStringConverter(session))
+                    .toArray(Date[]::new));
+          case Types.TIME:
+            return new SfSqlArray(
+                columnSubType,
+                getStream(nodeElements, getConverters().timeFromStringConverter(session))
+                    .toArray(Time[]::new));
+          case Types.TIMESTAMP:
+            return new SfSqlArray(
+                columnSubType,
+                getStream(
+                        nodeElements,
+                        getConverters()
+                            .timestampFromStringConverter(
+                                columnSubType, columnType, scale, session, null, sessionTimeZone))
+                    .toArray(Timestamp[]::new));
+          case Types.BOOLEAN:
+            return new SfSqlArray(
+                columnSubType,
+                getStream(nodeElements, getConverters().booleanConverter(columnType))
+                    .toArray(Boolean[]::new));
+          case Types.STRUCT:
+            return new SfSqlArray(
+                columnSubType,
+                getStream(nodeElements, getConverters().structConverter(OBJECT_MAPPER))
+                    .toArray(Map[]::new));
+          case Types.ARRAY:
+            return new SfSqlArray(
+                columnSubType,
+                getStream(nodeElements, getConverters().arrayConverter(OBJECT_MAPPER))
+                    .toArray(Map[][]::new));
+          default:
+            throw new SFException(
+                ErrorCode.FEATURE_UNSUPPORTED,
+                "Can't construct array for data type: " + columnSubType);
+        }
+      } else {

  [Collaborator review comment: can we invert the condition and skip the else branch to make fewer changes here?]
+        throw new SFException(
+            ErrorCode.INVALID_VALUE_CONVERT,
+            "Can't construct array from delivered data");
+      }
     } catch (JsonProcessingException e) {
       throw new SFException(e, ErrorCode.INVALID_STRUCT_DATA);
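Editor's note: the rewrite above parses the payload once, maps a JSON null to a SQL NULL array (returning null), and rejects non-array payloads with ErrorCode.INVALID_VALUE_CONVERT, where the old code would have failed on the unchecked (ArrayNode) cast. A sketch of the caller-visible behavior using only standard JDBC; the table and column names are invented for illustration:

import java.sql.Array;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;

public class StructuredArraySketch {
  // Hypothetical table T with a column ARR of type ARRAY(INTEGER).
  static void readArrays(Connection conn) throws Exception {
    try (Statement stmt = conn.createStatement();
        ResultSet rs = stmt.executeQuery("SELECT ARR FROM T")) {
      while (rs.next()) {
        Array arr = rs.getArray(1);
        if (arr == null) {
          // A SQL NULL array now surfaces as null rather than failing
          // inside the JSON parsing path.
          continue;
        }
        Integer[] values = (Integer[]) arr.getArray();
        System.out.println(values.length);
      }
    }
  }
}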
16 changes: 14 additions & 2 deletions src/main/java/net/snowflake/client/core/SfSqlArray.java
@@ -9,6 +9,7 @@
 import java.sql.SQLException;
 import java.sql.SQLFeatureNotSupportedException;
 import java.util.Arrays;
+import java.util.List;
 import java.util.Map;
 import net.snowflake.client.jdbc.BindingParameterMetadata;
 import net.snowflake.client.jdbc.SnowflakeUtil;
@@ -81,9 +82,13 @@ public ResultSet getResultSet(long index, int count, Map<String, Class<?>> map)
   @Override
   public void free() throws SQLException {}
 
-  public String getJsonString() throws SQLException {
+  public Object getElements() {
+    return elements;
+  }
+
+  public <T> String getArrayJsonString(int type) throws SQLException {
     try {
-      return SnowflakeUtil.mapJson(elements);
+      return SnowflakeUtil.mapArrayElements(elements, type, null);
     } catch (JsonProcessingException e) {
       throw new SQLException("There is exception during array to json string.", e);
     }
@@ -95,4 +100,11 @@ public BindingParameterMetadata getSchema() throws SQLException {
         .withFields(Arrays.asList(buildBindingSchemaForType(getBaseType(), false)))
         .build();
   }
+
+  public BindingParameterMetadata getSchema(List<BindingParameterMetadata> fields)
+      throws SQLException {
+    return BindingParameterMetadata.BindingParameterMetadataBuilder.bindingParameterMetadata()
+        .withType("array")
+        .withFields(fields)
+        .build();
+  }
 }
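Editor's note: these SfSqlArray changes split serialization out of the old getJsonString: getElements exposes the raw backing array, getArrayJsonString serializes it according to a base type, and the new getSchema overload accepts caller-built field metadata (useful for arrays of structured objects). A small usage sketch; SfSqlArray implements java.sql.Array, so getBaseType() is available, and the third (null) argument to mapArrayElements is copied from the diff without its meaning being shown here:

import java.sql.SQLException;
import net.snowflake.client.core.SfSqlArray;

public class ArrayBindingSketch {
  // Serializes the array's elements to the JSON form used for bindings,
  // keyed by the array's JDBC base type.
  static String toBindingJson(SfSqlArray array) throws SQLException {
    return array.getArrayJsonString(array.getBaseType());
  }
}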
src/main/java/net/snowflake/client/core/arrow/MapConverter.java
@@ -1,7 +1,8 @@
 package net.snowflake.client.core.arrow;
 
+import java.util.HashMap;
 import java.util.List;
-import java.util.stream.Collectors;
+import java.util.Map;
 import net.snowflake.client.core.DataConversionContext;
 import net.snowflake.client.core.SFException;
 import net.snowflake.client.jdbc.SnowflakeType;
@@ -21,9 +22,11 @@ public MapConverter(MapVector valueVector, int columnIndex, DataConversionContex
   public Object toObject(int index) throws SFException {
     List<JsonStringHashMap<String, Object>> entriesList =
         (List<JsonStringHashMap<String, Object>>) vector.getObject(index);
-    return entriesList.stream()
-        .collect(
-            Collectors.toMap(entry -> entry.get("key").toString(), entry -> entry.get("value")));
+    Map<String, Object> converted = new HashMap<>();
+    for (Map map : entriesList) {
+      converted.put(map.get("key").toString(), map.get("value"));
+    }
+    return converted;
   }
 
   @Override
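Editor's note: the switch away from Collectors.toMap is likely about null handling. Collectors.toMap accumulates entries via HashMap.merge, which throws NullPointerException for a null value, whereas a plain HashMap.put accepts nulls, which matters if decoded map entries can carry null values. A standalone demonstration of the difference:

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class ToMapNullValueDemo {
  public static void main(String[] args) {
    List<String> keys = List.of("a", "b");

    // Explicit put: null values are fine.
    Map<String, Object> viaLoop = new HashMap<>();
    for (String key : keys) {
      viaLoop.put(key, null);
    }
    System.out.println(viaLoop); // {a=null, b=null}

    // Collectors.toMap: throws NullPointerException on the first null value,
    // because it accumulates entries via HashMap.merge.
    Map<String, Object> viaStream =
        keys.stream().collect(Collectors.toMap(key -> key, key -> (Object) null));
  }
}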
@@ -1729,6 +1729,14 @@ public <T> Map<String, T> getMap(int columnIndex, Class<T> type) throws SQLExcep
               sfBaseResultSet.convertToTimestamp(
                   entry.getValue(), columnType, columnSubType, tz, scale)));
 
+    } else if (byte[].class.isAssignableFrom(type)) {
+      resultMap.put(
+          entry.getKey(),
+          mapSFExceptionToSQLException(
+              () ->
+                  (T)
+                      sfBaseResultSet.getConverters().getBytesConverter().getBytes(entry.getValue(), columnType, columnSubType, scale)));
+
     } else {
       logger.debug(
           "Unsupported type passed to getObject(int columnIndex,Class<T> type): "
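Editor's note: with the new branch, getMap can materialize MAP values whose value side is BINARY as byte arrays. A hedged usage sketch; the unwrap target SnowflakeBaseResultSet and the table layout are assumptions, not confirmed by the diff:

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.Map;
import net.snowflake.client.jdbc.SnowflakeBaseResultSet;

public class GetMapBinarySketch {
  // Hypothetical table T with a column M of type MAP(VARCHAR, BINARY).
  static void readBinaryMap(Connection conn) throws Exception {
    try (Statement stmt = conn.createStatement();
        ResultSet rs = stmt.executeQuery("SELECT M FROM T")) {
      while (rs.next()) {
        Map<String, byte[]> m =
            rs.unwrap(SnowflakeBaseResultSet.class).getMap(1, byte[].class);
        m.forEach((k, v) -> System.out.println(k + " -> " + v.length + " bytes"));
      }
    }
  }
}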