diff --git a/engine/api/src/main/java/io/deephaven/engine/table/Table.java b/engine/api/src/main/java/io/deephaven/engine/table/Table.java
index 6d606f464ac..02320d3b8e4 100644
--- a/engine/api/src/main/java/io/deephaven/engine/table/Table.java
+++ b/engine/api/src/main/java/io/deephaven/engine/table/Table.java
@@ -219,6 +219,8 @@ public interface Table extends
String BARRAGE_PERFORMANCE_KEY_ATTRIBUTE = "BarragePerformanceTableKey";
/**
* Set an Apache Arrow POJO Schema to this attribute to control the column encoding used for barrage serialization.
+ *
+ * See {@code org.apache.arrow.vector.types.pojo.Schema}.
*/
String BARRAGE_SCHEMA_ATTRIBUTE = "BarrageSchema";
diff --git a/extensions/barrage/BarrageTypeMapping.md b/extensions/barrage/BarrageTypeMapping.md
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReader.java
index b9c1584a0b9..6405d8689d4 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReader.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/ChunkReader.java
@@ -23,9 +23,9 @@
* data, supporting various data types and logical structures. This interface is part of the Deephaven Barrage
* extensions for handling streamed data ingestion.
*
- * @param <ReadChunkType> The type of chunk being read, extending {@link WritableChunk} with {@link Values}.
+ * @param <READ_CHUNK_TYPE> The type of chunk being read, extending {@link WritableChunk} with {@link Values}.
*/
-public interface ChunkReader<ReadChunkType extends WritableChunk<Values>> {
+public interface ChunkReader<READ_CHUNK_TYPE extends WritableChunk<Values>> {
/**
* Supports creation of {@link ChunkReader} instances to use when processing a flight stream. JVM implementations
@@ -55,7 +55,7 @@ > ChunkReader newReader(
* @throws IOException if an error occurred while reading the stream
*/
@FinalDefault
- default ReadChunkType readChunk(
+ default READ_CHUNK_TYPE readChunk(
@NotNull Iterator fieldNodeIter,
@NotNull PrimitiveIterator.OfLong bufferInfoIter,
@NotNull DataInput is) throws IOException {
@@ -74,7 +74,7 @@ default ReadChunkType readChunk(
* @return a Chunk containing the data from the stream
* @throws IOException if an error occurred while reading the stream
*/
- ReadChunkType readChunk(
+ READ_CHUNK_TYPE readChunk(
@NotNull Iterator fieldNodeIter,
@NotNull PrimitiveIterator.OfLong bufferInfoIter,
@NotNull DataInput is,
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReaderFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReaderFactory.java
index 3c52e581489..14c51a74c4d 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReaderFactory.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkReaderFactory.java
@@ -648,6 +648,9 @@ private static ChunkReader> decimalToBig
}
BigInteger unscaledValue = new BigInteger(value);
+ if (scale == 0) {
+ return unscaledValue;
+ }
return unscaledValue.divide(BigInteger.TEN.pow(scale));
});
}
@@ -702,17 +705,22 @@ private static ChunkReader> intToByte(
final BarrageOptions options) {
final ArrowType.Int intType = (ArrowType.Int) arrowType;
final int bitWidth = intType.getBitWidth();
+ final boolean unsigned = !intType.getIsSigned();
switch (bitWidth) {
case 8:
- // note unsigned mappings to byte will overflow byte; but user has asked for this
+ // note unsigned mappings to byte will overflow; but user has asked for this
return new ByteChunkReader(options);
case 16:
- // note shorts may overflow byte; but user has asked for this
+ if (unsigned) {
+ // note shorts may overflow; but user has asked for this
+ return ByteChunkReader.transformTo(new CharChunkReader(options),
+ (chunk, ii) -> QueryLanguageFunctionUtils.byteCast(chunk.get(ii)));
+ }
return ByteChunkReader.transformTo(new ShortChunkReader(options),
(chunk, ii) -> QueryLanguageFunctionUtils.byteCast(chunk.get(ii)));
case 32:
- // note ints may overflow byte; but user has asked for this
+ // note ints may overflow; but user has asked for this
return ByteChunkReader.transformTo(new IntChunkReader(options),
(chunk, ii) -> QueryLanguageFunctionUtils.byteCast(chunk.get(ii)));
case 64:
@@ -735,17 +743,19 @@ private static ChunkReader> intToShort(
switch (bitWidth) {
case 8:
return ShortChunkReader.transformTo(new ByteChunkReader(options),
- (chunk, ii) -> maskIfOverflow(unsigned,
- Byte.BYTES, QueryLanguageFunctionUtils.shortCast(chunk.get(ii))));
+ (chunk, ii) -> maskIfOverflow(unsigned, QueryLanguageFunctionUtils.shortCast(chunk.get(ii))));
case 16:
- // note unsigned mappings to short will overflow short; but user has asked for this
+ if (unsigned) {
+ return ShortChunkReader.transformTo(new CharChunkReader(options),
+ (chunk, ii) -> QueryLanguageFunctionUtils.shortCast(chunk.get(ii)));
+ }
return new ShortChunkReader(options);
case 32:
- // note ints may overflow short; but user has asked for this
+ // note ints may overflow; but user has asked for this
return ShortChunkReader.transformTo(new IntChunkReader(options),
(chunk, ii) -> QueryLanguageFunctionUtils.shortCast(chunk.get(ii)));
case 64:
- // note longs may overflow short; but user has asked for this
+ // note longs may overflow; but user has asked for this
return ShortChunkReader.transformTo(new LongChunkReader(options),
(chunk, ii) -> QueryLanguageFunctionUtils.shortCast(chunk.get(ii)));
default:
@@ -767,6 +777,10 @@ private static ChunkReader> intToInt(
(chunk, ii) -> maskIfOverflow(unsigned, Byte.BYTES,
QueryLanguageFunctionUtils.intCast(chunk.get(ii))));
case 16:
+ if (unsigned) {
+ return IntChunkReader.transformTo(new CharChunkReader(options),
+ (chunk, ii) -> QueryLanguageFunctionUtils.intCast(chunk.get(ii)));
+ }
return IntChunkReader.transformTo(new ShortChunkReader(options), (chunk, ii) -> maskIfOverflow(unsigned,
Short.BYTES, QueryLanguageFunctionUtils.intCast(chunk.get(ii))));
case 32:
@@ -795,6 +809,10 @@ private static ChunkReader> intToLong(
(chunk, ii) -> maskIfOverflow(unsigned, Byte.BYTES,
QueryLanguageFunctionUtils.longCast(chunk.get(ii))));
case 16:
+ if (unsigned) {
+ return LongChunkReader.transformTo(new CharChunkReader(options),
+ (chunk, ii) -> QueryLanguageFunctionUtils.longCast(chunk.get(ii)));
+ }
return LongChunkReader.transformTo(new ShortChunkReader(options),
(chunk, ii) -> maskIfOverflow(unsigned,
Short.BYTES, QueryLanguageFunctionUtils.longCast(chunk.get(ii))));
@@ -822,6 +840,10 @@ private static ChunkReader> intToBigInt(
return transformToObject(new ByteChunkReader(options), (chunk, ii) -> toBigInt(maskIfOverflow(
unsigned, Byte.BYTES, QueryLanguageFunctionUtils.longCast(chunk.get(ii)))));
case 16:
+ if (unsigned) {
+ return transformToObject(new CharChunkReader(options),
+ (chunk, ii) -> toBigInt(QueryLanguageFunctionUtils.longCast(chunk.get(ii))));
+ }
return transformToObject(new ShortChunkReader(options), (chunk, ii) -> toBigInt(maskIfOverflow(
unsigned, Short.BYTES, QueryLanguageFunctionUtils.longCast(chunk.get(ii)))));
case 32:
@@ -848,6 +870,10 @@ private static ChunkReader> intToFloat(
return FloatChunkReader.transformTo(new ByteChunkReader(options),
(chunk, ii) -> floatCast(Byte.BYTES, signed, chunk.isNull(ii), chunk.get(ii)));
case 16:
+ if (!signed) {
+ return FloatChunkReader.transformTo(new CharChunkReader(options),
+ (chunk, ii) -> floatCast(Character.BYTES, signed, chunk.isNull(ii), chunk.get(ii)));
+ }
return FloatChunkReader.transformTo(new ShortChunkReader(options),
(chunk, ii) -> floatCast(Short.BYTES, signed, chunk.isNull(ii), chunk.get(ii)));
case 32:
@@ -898,6 +924,10 @@ private static ChunkReader> intToDouble(
return DoubleChunkReader.transformTo(new ByteChunkReader(options),
(chunk, ii) -> doubleCast(Byte.BYTES, signed, chunk.isNull(ii), chunk.get(ii)));
case 16:
+ if (!signed) {
+ return DoubleChunkReader.transformTo(new CharChunkReader(options),
+ (chunk, ii) -> doubleCast(Character.BYTES, signed, chunk.isNull(ii), chunk.get(ii)));
+ }
return DoubleChunkReader.transformTo(new ShortChunkReader(options),
(chunk, ii) -> doubleCast(Short.BYTES, signed, chunk.isNull(ii), chunk.get(ii)));
case 32:
@@ -948,6 +978,10 @@ private static ChunkReader> intToBigDeci
return transformToObject(new ByteChunkReader(options), (chunk, ii) -> toBigDecimal(maskIfOverflow(
unsigned, Byte.BYTES, QueryLanguageFunctionUtils.longCast(chunk.get(ii)))));
case 16:
+ if (unsigned) {
+ return transformToObject(new CharChunkReader(options), (chunk, ii) -> toBigDecimal(maskIfOverflow(
+ unsigned, Character.BYTES, QueryLanguageFunctionUtils.longCast(chunk.get(ii)))));
+ }
return transformToObject(new ShortChunkReader(options), (chunk, ii) -> toBigDecimal(maskIfOverflow(
unsigned, Short.BYTES, QueryLanguageFunctionUtils.longCast(chunk.get(ii)))));
case 32:
@@ -983,11 +1017,11 @@ private static ChunkReader> intToChar(
(chunk, ii) -> QueryLanguageFunctionUtils.charCast(chunk.get(ii)));
}
case 32:
- // note unsigned mappings to char will overflow short; but user has asked for this
+ // note int mappings to char will overflow; but user has asked for this
return CharChunkReader.transformTo(new IntChunkReader(options),
(chunk, ii) -> QueryLanguageFunctionUtils.charCast(chunk.get(ii)));
case 64:
- // note unsigned mappings to short will overflow short; but user has asked for this
+ // note long mappings to char will overflow; but user has asked for this
return CharChunkReader.transformTo(new LongChunkReader(options),
(chunk, ii) -> QueryLanguageFunctionUtils.charCast(chunk.get(ii)));
default:
@@ -1248,16 +1282,17 @@ private static BigDecimal toBigDecimal(final long value) {
*
* Special handling is included to preserve the value of null-equivalent constants and to skip masking for signed
* values.
+ *
+ * Note that short can only be sign extended from byte so we don't need to consider other numByte configurations.
*
* @param unsigned Whether the value should be treated as unsigned.
- * @param numBytes The number of bytes to constrain the value to (e.g., 1 for byte, 2 for short).
* @param value The input value to potentially mask.
* @return The masked value if unsigned and overflow occurs; otherwise, the original value.
*/
@SuppressWarnings("SameParameterValue")
- private static short maskIfOverflow(final boolean unsigned, final int numBytes, short value) {
+ private static short maskIfOverflow(final boolean unsigned, short value) {
if (unsigned && value != QueryConstants.NULL_SHORT) {
- value &= (short) ((1L << (numBytes * 8)) - 1);
+ value &= (short) ((1L << 8) - 1);
}
return value;
}
@@ -1332,13 +1367,13 @@ private static BigInteger maskIfOverflow(final boolean unsigned, final int numBy
return value;
}
- private interface ToObjectTransformFunction> {
- T get(WireChunkType wireValues, int wireOffset);
+ private interface ToObjectTransformFunction> {
+ T get(WIRE_CHUNK_TYPE wireValues, int wireOffset);
}
- private static , CR extends ChunkReader> ChunkReader> transformToObject(
+ private static , CR extends ChunkReader> ChunkReader> transformToObject(
final CR wireReader,
- final ToObjectTransformFunction wireTransform) {
+ final ToObjectTransformFunction wireTransform) {
return new TransformingChunkReader<>(
wireReader,
WritableObjectChunk::makeWritableChunk,
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkWriterFactory.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkWriterFactory.java
index 24f08c37e83..f3cf9ea748b 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkWriterFactory.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DefaultChunkWriterFactory.java
@@ -15,6 +15,7 @@
import io.deephaven.chunk.ObjectChunk;
import io.deephaven.chunk.ShortChunk;
import io.deephaven.chunk.WritableByteChunk;
+import io.deephaven.chunk.WritableCharChunk;
import io.deephaven.chunk.WritableDoubleChunk;
import io.deephaven.chunk.WritableFloatChunk;
import io.deephaven.chunk.WritableIntChunk;
@@ -806,6 +807,15 @@ private static ChunkWriter> intFromByte(
case 8:
return ByteChunkWriter.getIdentity(typeInfo.arrowField().isNullable());
case 16:
+ if (!intType.getIsSigned()) {
+ return new CharChunkWriter<>((ByteChunk source) -> {
+ final WritableCharChunk chunk = WritableCharChunk.makeWritableChunk(source.size());
+ for (int ii = 0; ii < source.size(); ++ii) {
+ chunk.set(ii, QueryLanguageFunctionUtils.charCast(source.get(ii)));
+ }
+ return chunk;
+ }, ByteChunk::getEmptyChunk, typeInfo.arrowField().isNullable());
+ }
return new ShortChunkWriter<>((ByteChunk source) -> {
final WritableShortChunk chunk = WritableShortChunk.makeWritableChunk(source.size());
for (int ii = 0; ii < source.size(); ++ii) {
@@ -849,6 +859,15 @@ private static ChunkWriter> intFromShort(
return chunk;
}, ShortChunk::getEmptyChunk, typeInfo.arrowField().isNullable());
case 16:
+ if (!intType.getIsSigned()) {
+ return new CharChunkWriter<>((ShortChunk source) -> {
+ final WritableCharChunk chunk = WritableCharChunk.makeWritableChunk(source.size());
+ for (int ii = 0; ii < source.size(); ++ii) {
+ chunk.set(ii, QueryLanguageFunctionUtils.charCast(source.get(ii)));
+ }
+ return chunk;
+ }, ShortChunk::getEmptyChunk, typeInfo.arrowField().isNullable());
+ }
return ShortChunkWriter.getIdentity(typeInfo.arrowField().isNullable());
case 32:
return new IntChunkWriter<>((ShortChunk source) -> {
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java
index d91a85a88c2..15a4956cd8b 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/DoubleChunkReader.java
@@ -28,13 +28,13 @@
public class DoubleChunkReader extends BaseChunkReader> {
private static final String DEBUG_NAME = "DoubleChunkReader";
- public interface ToDoubleTransformFunction> {
- double get(WireChunkType wireValues, int wireOffset);
+ public interface ToDoubleTransformFunction> {
+ double get(WIRE_CHUNK_TYPE wireValues, int wireOffset);
}
- public static , T extends ChunkReader> ChunkReader> transformTo(
+ public static , T extends ChunkReader> ChunkReader> transformTo(
final T wireReader,
- final ToDoubleTransformFunction wireTransform) {
+ final ToDoubleTransformFunction wireTransform) {
return new TransformingChunkReader<>(
wireReader,
WritableDoubleChunk::makeWritableChunk,
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkReader.java
index 5008c2258ee..54a46fe0e37 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkReader.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/FloatChunkReader.java
@@ -24,13 +24,13 @@
public class FloatChunkReader extends BaseChunkReader> {
private static final String DEBUG_NAME = "FloatChunkReader";
- public interface ToFloatTransformFunction> {
- float get(WireChunkType wireValues, int wireOffset);
+ public interface ToFloatTransformFunction> {
+ float get(WIRE_CHUNK_TYPE wireValues, int wireOffset);
}
- public static , T extends ChunkReader> ChunkReader> transformTo(
+ public static , T extends ChunkReader> ChunkReader> transformTo(
final T wireReader,
- final ToFloatTransformFunction wireTransform) {
+ final ToFloatTransformFunction wireTransform) {
return new TransformingChunkReader<>(
wireReader,
WritableFloatChunk::makeWritableChunk,
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/NullChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/NullChunkReader.java
index a3b45dc837b..9b8832f7f03 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/NullChunkReader.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/NullChunkReader.java
@@ -14,7 +14,7 @@
import java.util.Iterator;
import java.util.PrimitiveIterator;
-public class NullChunkReader<ReadChunkType extends WritableChunk<Values>> extends BaseChunkReader<ReadChunkType> {
+public class NullChunkReader<READ_CHUNK_TYPE extends WritableChunk<Values>> extends BaseChunkReader<READ_CHUNK_TYPE> {
private final ChunkType resultType;
@@ -23,7 +23,7 @@ public NullChunkReader(Class> destType) {
}
@Override
- public ReadChunkType readChunk(
+ public READ_CHUNK_TYPE readChunk(
@NotNull final Iterator fieldNodeIter,
@NotNull final PrimitiveIterator.OfLong bufferInfoIter,
@NotNull final DataInput is,
@@ -42,6 +42,6 @@ public ReadChunkType readChunk(
chunk.fillWithNullValue(0, nodeInfo.numElements);
// noinspection unchecked
- return (ReadChunkType) chunk;
+ return (READ_CHUNK_TYPE) chunk;
}
}
diff --git a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/TransformingChunkReader.java b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/TransformingChunkReader.java
index 6689e9e45ac..5aacd5e1fd7 100644
--- a/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/TransformingChunkReader.java
+++ b/extensions/barrage/src/main/java/io/deephaven/extensions/barrage/chunk/TransformingChunkReader.java
@@ -19,26 +19,26 @@
/**
* A {@link ChunkReader} that reads a chunk of wire values and transforms them into a different chunk type.
*
- * @param <InputChunkType> the input chunk type
- * @param <OutputChunkType> the output chunk type
+ * @param <INPUT_CHUNK_TYPE> the input chunk type
+ * @param <OUTPUT_CHUNK_TYPE> the output chunk type
*/
-public class TransformingChunkReader, OutputChunkType extends WritableChunk>
- extends BaseChunkReader {
+public class TransformingChunkReader, OUTPUT_CHUNK_TYPE extends WritableChunk>
+ extends BaseChunkReader {
- public interface TransformFunction, OutputChunkType extends WritableChunk> {
- void apply(InputChunkType wireValues, OutputChunkType outChunk, int wireOffset, int outOffset);
+ public interface TransformFunction, OUTPUT_CHUNK_TYPE extends WritableChunk> {
+ void apply(INPUT_CHUNK_TYPE wireValues, OUTPUT_CHUNK_TYPE outChunk, int wireOffset, int outOffset);
}
- private final ChunkReader wireChunkReader;
- private final IntFunction chunkFactory;
- private final Function, OutputChunkType> castFunction;
- private final TransformFunction transformFunction;
+ private final ChunkReader wireChunkReader;
+ private final IntFunction chunkFactory;
+ private final Function, OUTPUT_CHUNK_TYPE> castFunction;
+ private final TransformFunction transformFunction;
public TransformingChunkReader(
- @NotNull final ChunkReader wireChunkReader,
- final IntFunction chunkFactory,
- final Function, OutputChunkType> castFunction,
- final TransformFunction transformFunction) {
+ @NotNull final ChunkReader wireChunkReader,
+ final IntFunction chunkFactory,
+ final Function, OUTPUT_CHUNK_TYPE> castFunction,
+ final TransformFunction transformFunction) {
this.wireChunkReader = wireChunkReader;
this.chunkFactory = chunkFactory;
this.castFunction = castFunction;
@@ -46,15 +46,15 @@ public TransformingChunkReader(
}
@Override
- public OutputChunkType readChunk(
+ public OUTPUT_CHUNK_TYPE readChunk(
@NotNull final Iterator fieldNodeIter,
@NotNull final PrimitiveIterator.OfLong bufferInfoIter,
@NotNull final DataInput is,
@Nullable final WritableChunk outChunk,
final int outOffset,
final int totalRows) throws IOException {
- try (final InputChunkType wireValues = wireChunkReader.readChunk(fieldNodeIter, bufferInfoIter, is)) {
- final OutputChunkType chunk = castOrCreateChunk(
+ try (final INPUT_CHUNK_TYPE wireValues = wireChunkReader.readChunk(fieldNodeIter, bufferInfoIter, is)) {
+ final OUTPUT_CHUNK_TYPE chunk = castOrCreateChunk(
outChunk, Math.max(totalRows, wireValues.size()), chunkFactory, castFunction);
if (outChunk == null) {
// if we're not given an output chunk then we better be writing at the front of the new one
diff --git a/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java b/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java
index 078679c5d21..9c487d6e7b8 100644
--- a/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java
+++ b/extensions/barrage/src/test/java/io/deephaven/extensions/barrage/chunk/BarrageColumnRoundTripTest.java
@@ -231,7 +231,8 @@ public void testLongChunkSerialization() throws IOException {
}
}
- private static void longIdentityValidator(WritableChunk utO, WritableChunk utC, RowSequence subset, int offset) {
+ private static void longIdentityValidator(WritableChunk utO, WritableChunk utC, RowSequence subset,
+ int offset) {
final WritableLongChunk original = utO.asWritableLongChunk();
final WritableLongChunk computed = utC.asWritableLongChunk();
if (subset == null) {
diff --git a/go/pkg/client/example_import_table_test.go b/go/pkg/client/example_import_table_test.go
index a1310e13e57..d0ee1c27629 100644
--- a/go/pkg/client/example_import_table_test.go
+++ b/go/pkg/client/example_import_table_test.go
@@ -93,7 +93,7 @@ func Example_importTable() {
// metadata: ["deephaven:isDateFormat": "false", "deephaven:isNumberFormat": "false", "deephaven:isPartitioning": "false", "deephaven:isRowStyle": "false", "deephaven:isSortable": "true", "deephaven:isStyle": "false", "deephaven:type": "float"]
// - Volume: type=int32, nullable
// metadata: ["deephaven:isDateFormat": "false", "deephaven:isNumberFormat": "false", "deephaven:isPartitioning": "false", "deephaven:isRowStyle": "false", "deephaven:isSortable": "true", "deephaven:isStyle": "false", "deephaven:type": "int"]
- // metadata: ["deephaven:attribute.AddOnly": "true", "deephaven:attribute.AppendOnly": "true", "deephaven:attribute.SortedColumns": "Close=Ascending", "deephaven:attribute_type.AddOnly": "java.lang.Boolean", "deephaven:attribute_type.AppendOnly": "java.lang.Boolean", "deephaven:attribute_type.SortedColumns": "java.lang.String"]
+ // metadata: ["deephaven:attribute.AddOnly": "true", "deephaven:attribute.AppendOnly": "true", "deephaven:attribute.SortedColumns": "Close=Ascending", "deephaven:attribute_type.AddOnly": "java.lang.Boolean", "deephaven:attribute_type.AppendOnly": "java.lang.Boolean", "deephaven:attribute_type.SortedColumns": "java.lang.String", "deephaven:unsent.attribute.BarrageSchema": ""]
// rows: 5
// col[0][Ticker]: ["IBM" "XRX" "XYZZY" "GME" "ZNGA"]
// col[1][Close]: [38.7 53.8 88.5 453 544.9]
diff --git a/go/pkg/client/example_table_ops_test.go b/go/pkg/client/example_table_ops_test.go
index 2c8e22d02df..00a55e8efb7 100644
--- a/go/pkg/client/example_table_ops_test.go
+++ b/go/pkg/client/example_table_ops_test.go
@@ -34,7 +34,7 @@ func Example_tableOps() {
fmt.Println(queryResult)
- // Output:
+ // Output:
// Data Before:
// record:
// schema:
@@ -47,7 +47,7 @@ func Example_tableOps() {
// col[1][Close]: [53.8 88.5 38.7 453 26.7 544.9 13.4]
// col[2][Volume]: [87000 6060842 138000 138000000 19000 48300 1500]
//
- // Data After:
+ // New data:
// record:
// schema:
// fields: 3
@@ -57,39 +57,28 @@ func Example_tableOps() {
// metadata: ["deephaven:isDateFormat": "false", "deephaven:isNumberFormat": "false", "deephaven:isPartitioning": "false", "deephaven:isRowStyle": "false", "deephaven:isSortable": "true", "deephaven:isStyle": "false", "deephaven:type": "float"]
// - Volume: type=int32, nullable
// metadata: ["deephaven:isDateFormat": "false", "deephaven:isNumberFormat": "false", "deephaven:isPartitioning": "false", "deephaven:isRowStyle": "false", "deephaven:isSortable": "true", "deephaven:isStyle": "false", "deephaven:type": "int"]
- // metadata: ["deephaven:attribute.AddOnly": "true", "deephaven:attribute.AppendOnly": "true", "deephaven:attribute.SortedColumns": "Close=Ascending", "deephaven:attribute_type.AddOnly": "java.lang.Boolean", "deephaven:attribute_type.AppendOnly": "java.lang.Boolean", "deephaven:attribute_type.SortedColumns": "java.lang.String", "deephaven:unsent.attribute.BarrageSchema": ""]
+ // metadata: ["deephaven:attribute.AddOnly": "true", "deephaven:attribute.AppendOnly": "true", "deephaven:attribute_type.AddOnly": "java.lang.Boolean", "deephaven:attribute_type.AppendOnly": "java.lang.Boolean", "deephaven:unsent.attribute.BarrageSchema": ""]
// rows: 5
- // col[0][Ticker]: ["IBM" "XRX" "XYZZY" "GME" "ZNGA"]
- // col[1][Close]: [38.7 53.8 88.5 453 544.9]
- // col[2][Volume]: [138000 87000 6060842 138000000 48300]
- // want:
- // Data Before:
- // record:
- // schema:
- // fields: 3
- // - Ticker: type=utf8, nullable
- // - Close: type=float32, nullable
- // - Volume: type=int32, nullable
- // rows: 7
- // col[0][Ticker]: ["XRX" "XYZZY" "IBM" "GME" "AAPL" "ZNGA" "T"]
- // col[1][Close]: [53.8 88.5 38.7 453 26.7 544.9 13.4]
- // col[2][Volume]: [87000 6060842 138000 138000000 19000 48300 1500]
+ // col[0][Ticker]: ["XRX" "IBM" "GME" "AAPL" "ZNGA"]
+ // col[1][Close]: [53.8 38.7 453 26.7 544.9]
+ // col[2][Volume]: [87000 138000 138000000 19000 48300]
//
- // Data After:
// record:
// schema:
- // fields: 3
+ // fields: 4
// - Ticker: type=utf8, nullable
// metadata: ["deephaven:isDateFormat": "false", "deephaven:isNumberFormat": "false", "deephaven:isPartitioning": "false", "deephaven:isRowStyle": "false", "deephaven:isSortable": "true", "deephaven:isStyle": "false", "deephaven:type": "java.lang.String"]
// - Close: type=float32, nullable
// metadata: ["deephaven:isDateFormat": "false", "deephaven:isNumberFormat": "false", "deephaven:isPartitioning": "false", "deephaven:isRowStyle": "false", "deephaven:isSortable": "true", "deephaven:isStyle": "false", "deephaven:type": "float"]
// - Volume: type=int32, nullable
// metadata: ["deephaven:isDateFormat": "false", "deephaven:isNumberFormat": "false", "deephaven:isPartitioning": "false", "deephaven:isRowStyle": "false", "deephaven:isSortable": "true", "deephaven:isStyle": "false", "deephaven:type": "int"]
- // metadata: ["deephaven:attribute.AddOnly": "true", "deephaven:attribute.AppendOnly": "true", "deephaven:attribute.SortedColumns": "Close=Ascending", "deephaven:attribute_type.AddOnly": "java.lang.Boolean", "deephaven:attribute_type.AppendOnly": "java.lang.Boolean", "deephaven:attribute_type.SortedColumns": "java.lang.String"]
+ // - Magnitude: type=int32, nullable
+ // metadata: ["deephaven:isDateFormat": "false", "deephaven:isNumberFormat": "false", "deephaven:isPartitioning": "false", "deephaven:isRowStyle": "false", "deephaven:isSortable": "true", "deephaven:isStyle": "false", "deephaven:type": "int"]
// rows: 5
- // col[0][Ticker]: ["IBM" "XRX" "XYZZY" "GME" "ZNGA"]
- // col[1][Close]: [38.7 53.8 88.5 453 544.9]
- // col[2][Volume]: [138000 87000 6060842 138000000 48300]
+ // col[0][Ticker]: ["XRX" "IBM" "GME" "AAPL" "ZNGA"]
+ // col[1][Close]: [53.8 38.7 453 26.7 544.9]
+ // col[2][Volume]: [87000 138000 138000000 19000 48300]
+ // col[3][Magnitude]: [10000 100000 100000000 10000 10000]
}
// This function demonstrates how to use immediate table operations.
diff --git a/server/jetty/src/test/java/io/deephaven/server/jetty/JettyBarrageChunkFactoryTest.java b/server/jetty/src/test/java/io/deephaven/server/jetty/JettyBarrageChunkFactoryTest.java
index 8e3a9f2c2df..ade04d7ec4d 100644
--- a/server/jetty/src/test/java/io/deephaven/server/jetty/JettyBarrageChunkFactoryTest.java
+++ b/server/jetty/src/test/java/io/deephaven/server/jetty/JettyBarrageChunkFactoryTest.java
@@ -7,7 +7,6 @@
import dagger.Module;
import dagger.Provides;
import dagger.multibindings.IntoSet;
-import io.deephaven.UncheckedDeephavenException;
import io.deephaven.auth.AuthContext;
import io.deephaven.base.clock.Clock;
import io.deephaven.client.impl.BearerHandler;
@@ -24,7 +23,6 @@
import io.deephaven.engine.util.AbstractScriptSession;
import io.deephaven.engine.util.NoLanguageDeephavenSession;
import io.deephaven.engine.util.ScriptSession;
-import io.deephaven.engine.util.TableTools;
import io.deephaven.extensions.barrage.util.BarrageUtil;
import io.deephaven.io.logger.LogBuffer;
import io.deephaven.io.logger.LogBufferGlobal;
@@ -51,6 +49,7 @@
import io.deephaven.server.test.TestAuthModule;
import io.deephaven.server.test.TestAuthorizationProvider;
import io.deephaven.server.util.Scheduler;
+import io.deephaven.util.QueryConstants;
import io.deephaven.util.SafeCloseable;
import io.grpc.CallOptions;
import io.grpc.Channel;
@@ -72,9 +71,12 @@
import org.apache.arrow.flight.auth2.Auth2Constants;
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.memory.RootAllocator;
-import org.apache.arrow.vector.IntVector;
+import org.apache.arrow.vector.FieldVector;
+import org.apache.arrow.vector.SmallIntVector;
+import org.apache.arrow.vector.TinyIntVector;
+import org.apache.arrow.vector.UInt1Vector;
+import org.apache.arrow.vector.UInt2Vector;
import org.apache.arrow.vector.VectorSchemaRoot;
-import org.apache.arrow.vector.types.Types;
import org.apache.arrow.vector.types.pojo.ArrowType;
import org.apache.arrow.vector.types.pojo.Field;
import org.apache.arrow.vector.types.pojo.FieldType;
@@ -98,21 +100,22 @@
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
+import java.util.Random;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.function.BiConsumer;
-import java.util.function.Consumer;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
public class JettyBarrageChunkFactoryTest {
private static final String COLUMN_NAME = "test_col";
+ private static final int NUM_ROWS = 1000;
+ private static final int RANDOM_SEED = 42;
@Module
public interface JettyTestConfig {
@@ -318,7 +321,7 @@ public TestAuthClientInterceptor(String bearerToken) {
@Override
public ClientCall interceptCall(MethodDescriptor method,
- CallOptions callOptions, Channel next) {
+ CallOptions callOptions, Channel next) {
return next.newCall(method, callOptions.withCallCredentials(callCredentials));
}
}
@@ -374,122 +377,344 @@ protected void after() {
}
};
- private void fullyReadStream(Ticket ticket, boolean expectError) {
- try (final FlightStream stream = flightClient.getStream(ticket)) {
- // noinspection StatementWithEmptyBody
- while (stream.next());
- if (expectError) {
- fail("expected error");
- }
- } catch (Exception ignored) {
- }
+ private Schema createSchema(boolean nullable, ArrowType arrowType, Class> dhType) {
+ return createSchema(nullable, arrowType, dhType, null);
}
- private Schema createSchema(final ArrowType arrowType, final Class> dhType) {
- return createSchema(arrowType, dhType, null);
- }
-
- private Schema createSchema(final ArrowType arrowType, final Class> dhType, final Class> dhComponentType) {
+ private Schema createSchema(
+ final boolean nullable,
+ final ArrowType arrowType,
+ final Class> dhType,
+ final Class> dhComponentType) {
final Map attrs = new HashMap<>();
attrs.put(BarrageUtil.ATTR_DH_PREFIX + BarrageUtil.ATTR_TYPE_TAG, dhType.getCanonicalName());
if (dhComponentType != null) {
attrs.put(BarrageUtil.ATTR_DH_PREFIX + BarrageUtil.ATTR_COMPONENT_TYPE_TAG,
dhComponentType.getCanonicalName());
}
- final FieldType fieldType = new FieldType(true, arrowType, null, attrs);
+ final FieldType fieldType = new FieldType(nullable, arrowType, null, attrs);
return new Schema(Collections.singletonList(
new Field(COLUMN_NAME, fieldType, null)));
}
@Test
public void testInt8() throws Exception {
- final int numRows = 16;
- final Consumer<VectorSchemaRoot> setupData = source -> {
- IntVector vector = (IntVector) source.getFieldVectors().get(0);
- for (int ii = 0; ii < numRows; ++ii) {
- if (ii % 2 == 0) {
- vector.setNull(ii);
- } else {
- vector.set(ii, (byte) (ii - 8));
+ class Test extends RoundTripTest<TinyIntVector> {
+ Test(Class<?> dhType) {
+ super(dhType);
+ }
+
+ @Override
+ public Schema newSchema(boolean isNullable) {
+ return createSchema(isNullable, new ArrowType.Int(8, true), dhType);
+ }
+
+ @Override
+ public int initializeRoot(@NotNull TinyIntVector source) {
+ int start = setAll(source::set,
+ QueryConstants.MIN_BYTE, QueryConstants.MAX_BYTE, (byte) -1, (byte) 0, (byte) 1);
+ for (int ii = start; ii < NUM_ROWS; ++ii) {
+ byte value = (byte) rnd.nextInt();
+ source.set(ii, value);
+ if (value == QueryConstants.NULL_BYTE) {
+ --ii;
+ }
+ }
+ return NUM_ROWS;
+ }
+
+ @Override
+ public void validate(@NotNull TinyIntVector source, @NotNull TinyIntVector dest) {
+ for (int ii = 0; ii < source.getValueCount(); ++ii) {
+ if (source.isNull(ii)) {
+ assertTrue(dest.isNull(ii));
+ } else if (dhType == char.class && source.get(ii) == -1) {
+ // this is going to be coerced to null if nullable or else NULL_BYTE if non-nullable
+ assertTrue(dest.isNull(ii) || dest.get(ii) == QueryConstants.NULL_BYTE);
+ } else {
+ assertEquals(source.get(ii), dest.get(ii));
+ }
+ }
+ }
+ }
+
+ new Test(byte.class).doTest();
+ new Test(char.class).doTest();
+ new Test(short.class).doTest();
+ new Test(int.class).doTest();
+ new Test(long.class).doTest();
+ new Test(float.class).doTest();
+ new Test(double.class).doTest();
+ new Test(BigInteger.class).doTest();
+ new Test(BigDecimal.class).doTest();
+ }
+
+ @Test
+ public void testUint8() throws Exception {
+ class Test extends RoundTripTest<UInt1Vector> {
+ Test(Class<?> dhType) {
+ super(dhType);
+ }
+
+ @Override
+ public Schema newSchema(boolean isNullable) {
+ return createSchema(isNullable, new ArrowType.Int(8, false), dhType);
+ }
+
+ @Override
+ public int initializeRoot(@NotNull UInt1Vector source) {
+ int start = setAll(source::set,
+ QueryConstants.MIN_BYTE, QueryConstants.MAX_BYTE, (byte) -1, (byte) 0, (byte) 1);
+ for (int ii = start; ii < NUM_ROWS; ++ii) {
+ byte value = (byte) rnd.nextInt();
+ source.set(ii, value);
+ if (value == QueryConstants.NULL_BYTE) {
+ --ii;
+ }
+ }
+ return NUM_ROWS;
+ }
+
+ @Override
+ public void validate(@NotNull UInt1Vector source, @NotNull UInt1Vector dest) {
+ for (int ii = 0; ii < source.getValueCount(); ++ii) {
+ if (source.isNull(ii)) {
+ assertTrue(dest.isNull(ii));
+ } else if (dhType == char.class && source.get(ii) == -1) {
+ // this is going to be coerced to null if nullable or else NULL_BYTE if non-nullable
+ assertTrue(dest.isNull(ii) || dest.get(ii) == QueryConstants.NULL_BYTE);
+ } else {
+ assertEquals(source.get(ii), dest.get(ii));
+ }
+ }
+ }
+ }
+
+ new Test(byte.class).doTest();
+ new Test(char.class).doTest();
+ new Test(short.class).doTest();
+ new Test(int.class).doTest();
+ new Test(long.class).doTest();
+ new Test(float.class).doTest();
+ new Test(double.class).doTest();
+ new Test(BigInteger.class).doTest();
+ new Test(BigDecimal.class).doTest();
+ }
+
+ @Test
+ public void testInt16() throws Exception {
+ class Test extends RoundTripTest<SmallIntVector> {
+ Test(Class<?> dhType) {
+ super(dhType);
+ }
+
+ @Override
+ public Schema newSchema(boolean isNullable) {
+ return createSchema(isNullable, new ArrowType.Int(16, true), dhType);
+ }
+
+ @Override
+ public int initializeRoot(@NotNull SmallIntVector source) {
+ int start = setAll(source::set,
+ QueryConstants.MIN_SHORT, QueryConstants.MAX_SHORT, (short) -1, (short) 0, (short) 1);
+ for (int ii = start; ii < NUM_ROWS; ++ii) {
+ short value = (short) rnd.nextInt();
+ source.set(ii, value);
+ if (value == QueryConstants.NULL_SHORT) {
+ --ii;
+ }
}
+ return NUM_ROWS;
}
- source.setRowCount(numRows);
- };
- final BiConsumer<VectorSchemaRoot, VectorSchemaRoot> validator = (source, dest) -> {
- IntVector sVector = (IntVector) source.getVector(0);
- IntVector dVector = (IntVector) dest.getVector(0);
- for (int ii = 0; ii < numRows; ii++) {
- if (ii % 2 == 0) {
- assertTrue(dVector.isNull(ii));
- } else {
- assertEquals(sVector.get(ii), dVector.get(ii));
+
+ @Override
+ public void validate(@NotNull SmallIntVector source, @NotNull SmallIntVector dest) {
+ for (int ii = 0; ii < source.getValueCount(); ++ii) {
+ if (source.isNull(ii)) {
+ assertTrue(dest.isNull(ii));
+ } else if (dhType == byte.class) {
+ byte asByte = (byte) source.get(ii);
+ if (asByte == QueryConstants.NULL_BYTE) {
+ assertTrue(dest.isNull(ii) || dest.get(ii) == QueryConstants.NULL_SHORT);
+ } else {
+ assertEquals(asByte, dest.get(ii));
+ }
+ } else if (dhType == char.class && source.get(ii) == -1) {
+ // this is going to be coerced to null if nullable or else NULL_SHORT if non-nullable
+ assertTrue(dest.isNull(ii) || dest.get(ii) == QueryConstants.NULL_SHORT);
+ } else {
+ assertEquals(source.get(ii), dest.get(ii));
+ }
}
}
- };
- final Consumer<Class<?>> runForDhType = dhType -> {
- Schema schema = createSchema(Types.MinorType.INT.getType(), dhType);
- testRoundTrip(dhType, null, schema, setupData, validator);
- };
-
- runForDhType.accept(byte.class);
-// runForDhType.accept(char.class);
- runForDhType.accept(short.class);
- runForDhType.accept(int.class);
- runForDhType.accept(long.class);
- runForDhType.accept(float.class);
- runForDhType.accept(double.class);
- runForDhType.accept(BigInteger.class);
- runForDhType.accept(BigDecimal.class);
+ }
+
+ new Test(byte.class).doTest();
+ new Test(char.class).doTest();
+ new Test(short.class).doTest();
+ new Test(int.class).doTest();
+ new Test(long.class).doTest();
+ new Test(float.class).doTest();
+ new Test(double.class).doTest();
+ new Test(BigInteger.class).doTest();
+ new Test(BigDecimal.class).doTest();
}
- private void testRoundTrip(
- @NotNull final Class<?> dhType,
- @Nullable final Class<?> componentType,
- @NotNull final Schema schema,
- @NotNull final Consumer<VectorSchemaRoot> setupData,
- @NotNull final BiConsumer<VectorSchemaRoot, VectorSchemaRoot> validator) {
- try (VectorSchemaRoot source = VectorSchemaRoot.create(schema, allocator)) {
- source.allocateNew();
- setupData.accept(source);
-
- int flightDescriptorTicketValue = nextTicket++;
- FlightDescriptor descriptor = FlightDescriptor.path("export", flightDescriptorTicketValue + "");
- FlightClient.ClientStreamListener putStream = flightClient.startPut(descriptor, source, new AsyncPutListener());
- putStream.putNext();
- putStream.completed();
-
- // get the table that was uploaded, and confirm it matches what we originally sent
- CompletableFuture<Table> tableFuture = new CompletableFuture<>();
- SessionState.ExportObject<Table> tableExport = currentSession.getExport(flightDescriptorTicketValue);
- currentSession.nonExport()
- .onErrorHandler(exception -> tableFuture.cancel(true))
- .require(tableExport)
- .submit(() -> tableFuture.complete(tableExport.get()));
-
- // block until we're done, so we can get the table and see what is inside
- putStream.getResult();
- Table uploadedTable = tableFuture.get();
- assertEquals(source.getRowCount(), uploadedTable.size());
- assertEquals(1, uploadedTable.getColumnSourceMap().size());
- ColumnSource