 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
 import java.math.BigDecimal;
+import java.sql.Array;
 import java.sql.Date;
 import java.sql.Time;
 import java.sql.Timestamp;
 import java.sql.Types;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Spliterator;
+import java.util.Spliterators;
 import java.util.TimeZone;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.stream.Stream;
+import java.util.stream.StreamSupport;
 import net.snowflake.client.core.json.Converters;
 import net.snowflake.client.jdbc.ErrorCode;
+import net.snowflake.client.jdbc.FieldMetadata;
+import net.snowflake.client.jdbc.SnowflakeColumnMetadata;
 import net.snowflake.client.log.SFLogger;
 import net.snowflake.client.log.SFLoggerFactory;
+import net.snowflake.client.util.JsonStringToTypeConverter;

 /** Abstract class used to represent snowflake result set in json format */
 public abstract class SFJsonResultSet extends SFBaseResultSet {
@@ -83,40 +95,27 @@ public Object getObject(int columnIndex) throws SFException {
         return getBoolean(columnIndex);

       case Types.STRUCT:
-        if (Boolean.valueOf(System.getProperty(STRUCTURED_TYPE_ENABLED_PROPERTY_NAME))) {
+        if (Boolean.parseBoolean(System.getProperty(STRUCTURED_TYPE_ENABLED_PROPERTY_NAME))) {
           return getSqlInput((String) obj, columnIndex);
         } else {
           throw new SFException(ErrorCode.FEATURE_UNSUPPORTED, "data type: " + type);
         }
+      case Types.ARRAY:
+        if (Boolean.parseBoolean(System.getProperty(STRUCTURED_TYPE_ENABLED_PROPERTY_NAME))) {
+          return getArray(columnIndex);
+        } else {
+          throw new SFException(ErrorCode.FEATURE_UNSUPPORTED, "data type: " + type);
+        }

       default:
         throw new SFException(ErrorCode.FEATURE_UNSUPPORTED, "data type: " + type);
     }
   }

-  private Object getSqlInput(String input, int columnIndex) throws SFException {
-    try {
-      JsonNode jsonNode = OBJECT_MAPPER.readTree(input);
-      return new JsonSqlInput(
-          jsonNode,
-          session,
-          converters,
-          resultSetMetaData.getColumnMetadata().get(columnIndex - 1).getFields());
-    } catch (JsonProcessingException e) {
-      throw new SFException(e, ErrorCode.INVALID_STRUCT_DATA);
-    }
-  }
-
-  /**
-   * Sometimes large BIGINTS overflow the java Long type. In these cases, return a BigDecimal type
-   * instead.
-   *
-   * @param columnIndex the column index
-   * @return an object of type long or BigDecimal depending on number size
-   * @throws SFException
-   */
-  private Object getBigInt(int columnIndex, Object obj) throws SFException {
-    return converters.getNumberConverter().getBigInt(obj, columnIndex);
+  @Override
+  public Array getArray(int columnIndex) throws SFException {
+    Object obj = getObjectInternal(columnIndex);
+    return getArrayInternal((String) obj, columnIndex);
   }

   @Override
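Note: a minimal sketch of how this new path could be exercised through the public JDBC API. The connection URL, credentials, query, and column name below are illustrative assumptions, not part of this change, and the structured-type feature flag read via STRUCTURED_TYPE_ENABLED_PROPERTY_NAME is assumed to be enabled in the environment.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class ArrayColumnSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical connection details; any Snowflake account/URL/credentials would do.
    try (Connection conn =
            DriverManager.getConnection(
                "jdbc:snowflake://<account>.snowflakecomputing.com/", "<user>", "<password>");
        Statement stmt = conn.createStatement();
        // The cast to a structured ARRAY type is assumed to be what populates the field
        // metadata read by getArrayInternal; plain semi-structured ARRAYs may not take
        // this code path.
        ResultSet rs =
            stmt.executeQuery("SELECT ARRAY_CONSTRUCT(1, 2, 3)::ARRAY(NUMBER) AS ARR_COL")) {
      while (rs.next()) {
        // getObject/getArray on an ARRAY column is expected to return a java.sql.Array
        // backed by SfSqlArray when the result arrives in JSON format.
        java.sql.Array array = rs.getArray("ARR_COL");
        for (Object element : (Object[]) array.getArray()) {
          System.out.println(element);
        }
      }
    }
  }
}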
@@ -250,4 +249,178 @@ public Date getDate(int columnIndex, TimeZone tz) throws SFException {
   private Timestamp getTimestamp(int columnIndex) throws SFException {
     return getTimestamp(columnIndex, TimeZone.getDefault());
   }
+
+  @Override
+  @SnowflakeJdbcInternalApi
+  public Converters getConverters() {
+    return converters;
+  }
+
+  private Object getSqlInput(String input, int columnIndex) throws SFException {
+    try {
+      JsonNode jsonNode = OBJECT_MAPPER.readTree(input);
+      return new JsonSqlInput(
+          jsonNode,
+          session,
+          converters,
+          resultSetMetaData.getColumnMetadata().get(columnIndex - 1).getFields());
+    } catch (JsonProcessingException e) {
+      throw new SFException(e, ErrorCode.INVALID_STRUCT_DATA);
+    }
+  }
+
+  private SfSqlArray getArrayInternal(String obj, int columnIndex) throws SFException {
+    try {
+      SnowflakeColumnMetadata arrayMetadata =
+          resultSetMetaData.getColumnMetadata().get(columnIndex - 1);
+      FieldMetadata fieldMetadata = arrayMetadata.getFields().get(0);
+
+      int columnSubType = fieldMetadata.getType();
+      int columnType = ColumnTypeHelper.getColumnType(columnSubType, session);
+      int scale = fieldMetadata.getScale();
+
+      ArrayNode arrayNode = (ArrayNode) OBJECT_MAPPER.readTree(obj);
+      Iterator<JsonNode> nodeElements = arrayNode.elements();
+
+      switch (columnSubType) {
+        case Types.INTEGER:
+          return new SfSqlArray(
+              columnSubType,
+              getStream(nodeElements, converters.integerConverter(columnType))
+                  .toArray(Integer[]::new));
+        case Types.SMALLINT:
+          return new SfSqlArray(
+              columnSubType,
+              getStream(nodeElements, converters.smallIntConverter(columnType))
+                  .toArray(Short[]::new));
+        case Types.TINYINT:
+          return new SfSqlArray(
+              columnSubType,
+              getStream(nodeElements, converters.tinyIntConverter(columnType))
+                  .toArray(Byte[]::new));
+        case Types.BIGINT:
+          return new SfSqlArray(
+              columnSubType,
+              getStream(nodeElements, converters.bigIntConverter(columnType)).toArray(Long[]::new));
+        case Types.DECIMAL:
+        case Types.NUMERIC:
+          return new SfSqlArray(
+              columnSubType,
+              convertToFixedArray(nodeElements, converters.bigDecimalConverter(columnType)));
+        case Types.CHAR:
+        case Types.VARCHAR:
+        case Types.LONGNVARCHAR:
+          return new SfSqlArray(
+              columnSubType,
+              getStream(nodeElements, converters.varcharConverter(columnType, columnSubType, scale))
+                  .toArray(String[]::new));
+        case Types.BINARY:
+          return new SfSqlArray(
+              columnSubType,
+              getStream(nodeElements, converters.bytesConverter(columnType, scale))
+                  .toArray(Byte[][]::new));
+        case Types.FLOAT:
+        case Types.REAL:
+          return new SfSqlArray(
+              columnSubType,
+              getStream(nodeElements, converters.floatConverter(columnType)).toArray(Float[]::new));
+        case Types.DOUBLE:
+          return new SfSqlArray(
+              columnSubType,
+              getStream(nodeElements, converters.doubleConverter(columnType))
+                  .toArray(Double[]::new));
+        case Types.DATE:
+          return new SfSqlArray(
+              columnSubType,
+              getStream(nodeElements, converters.dateConverter(session)).toArray(Date[]::new));
+        case Types.TIME:
+          return new SfSqlArray(
+              columnSubType,
+              getStream(nodeElements, converters.timeConverter(session)).toArray(Time[]::new));
+        case Types.TIMESTAMP:
+          return new SfSqlArray(
+              columnSubType,
+              getStream(
+                      nodeElements,
+                      converters.timestampConverter(columnSubType, columnType, scale, session))
+                  .toArray(Timestamp[]::new));
+        case Types.BOOLEAN:
+          return new SfSqlArray(
+              columnSubType,
+              getStream(nodeElements, converters.booleanConverter(columnType))
+                  .toArray(Boolean[]::new));
+        case Types.STRUCT:
+          return new SfSqlArray(
+              columnSubType,
+              getStream(nodeElements, converters.structConverter(OBJECT_MAPPER))
+                  .toArray(Map[]::new));
+        case Types.ARRAY:
+          return new SfSqlArray(
+              columnSubType,
+              getStream(nodeElements, converters.arrayConverter(OBJECT_MAPPER))
+                  .toArray(Map[][]::new));
+        default:
+          throw new SFException(
+              ErrorCode.FEATURE_UNSUPPORTED,
+              "Can't construct array for data type: " + columnSubType);
+      }
+    } catch (JsonProcessingException e) {
+      throw new SFException(e, ErrorCode.INVALID_STRUCT_DATA);
+    }
+  }
+
+  private Object[] convertToFixedArray(
+      Iterator nodeElements, JsonStringToTypeConverter bigIntConverter) {
+    AtomicInteger bigDecimalCount = new AtomicInteger();
+    Object[] elements =
+        getStream(nodeElements, bigIntConverter)
+            .peek(
+                elem -> {
+                  if (elem instanceof BigDecimal) {
+                    bigDecimalCount.incrementAndGet();
+                  }
+                })
+            .toArray(
+                size -> {
+                  boolean shouldBeReturnedAsBigDecimal = bigDecimalCount.get() > 0;
+                  Class<?> returnedClass =
+                      shouldBeReturnedAsBigDecimal ? BigDecimal.class : Long.class;
+                  return java.lang.reflect.Array.newInstance(returnedClass, size);
+                });
+    return elements;
+  }
+
+  private Stream getStream(Iterator nodeElements, JsonStringToTypeConverter converter) {
+    return StreamSupport.stream(
+            Spliterators.spliteratorUnknownSize(nodeElements, Spliterator.ORDERED), false)
+        .map(
+            elem -> {
+              try {
+                return convert(converter, (JsonNode) elem);
+              } catch (SFException e) {
+                throw new RuntimeException(e);
+              }
+            });
+  }
+
+  private static Object convert(JsonStringToTypeConverter converter, JsonNode node)
+      throws SFException {
+    if (node.isValueNode()) {
+      return converter.convert(node.asText());
+    } else {
+      return converter.convert(node.toString());
+    }
+  }
+
+  /**
+   * Sometimes large BIGINTS overflow the java Long type. In these cases, return a BigDecimal type
+   * instead.
+   *
+   * @param columnIndex the column index
+   * @return an object of type long or BigDecimal depending on number size
+   * @throws SFException
+   */
+  private Object getBigInt(int columnIndex, Object obj) throws SFException {
+    return converters.getNumberConverter().getBigInt(obj, columnIndex);
+  }
 }
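For reference, a stand-alone sketch of the streaming pattern that getStream(...) above relies on: the JSON text of an ARRAY value is parsed with Jackson and each element is run through a per-element converter. Long.parseLong stands in here for the driver's JsonStringToTypeConverter; only JDK and Jackson APIs are used, and the input string is illustrative.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.stream.StreamSupport;

public class JsonArrayStreamSketch {
  public static void main(String[] args) throws Exception {
    // A JSON array as it might appear in the text of an ARRAY column.
    String raw = "[1, 2, 3]";
    ArrayNode arrayNode = (ArrayNode) new ObjectMapper().readTree(raw);
    Iterator<JsonNode> nodeElements = arrayNode.elements();
    Long[] values =
        StreamSupport.stream(
                Spliterators.spliteratorUnknownSize(nodeElements, Spliterator.ORDERED), false)
            // Long.parseLong stands in for the per-element JsonStringToTypeConverter.
            .map(node -> Long.parseLong(node.asText()))
            .toArray(Long[]::new);
    System.out.println(Arrays.toString(values)); // [1, 2, 3]
  }
}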