diff --git a/backends-velox/src/main/scala/org/apache/spark/sql/execution/unsafe/UnsafeBytesBufferArray.scala b/backends-velox/src/main/scala/org/apache/spark/sql/execution/unsafe/UnsafeBytesBufferArray.scala
index 7192586cfc0bb..57d961b575d09 100644
--- a/backends-velox/src/main/scala/org/apache/spark/sql/execution/unsafe/UnsafeBytesBufferArray.scala
+++ b/backends-velox/src/main/scala/org/apache/spark/sql/execution/unsafe/UnsafeBytesBufferArray.scala
@@ -76,15 +76,13 @@ case class UnsafeBytesBufferArray(
     assert(bytesBuffer.length == bytesBufferLengths(index))
     // first to allocate underlying long array
     if (null == longArray && index == 0) {
-      log.debug(s"allocate array $totalBytes, actual longArray size ${(totalBytes + 7) / 8}")
+      log.warn(s"allocate array $totalBytes, actual longArray size ${(totalBytes + 7) / 8}")
       longArray = allocateArray((totalBytes + 7) / 8)
     }
-    if (log.isDebugEnabled) {
-      log.debug(s"put bytesBuffer at index $index bytesBuffer's length is ${bytesBuffer.length}")
-      log.debug(
-        s"bytesBuffer at index $index " +
-          s"digest ${calculateMD5(bytesBuffer).mkString("Array(", ", ", ")")}")
-    }
+    log.warn(s"put bytesBuffer at index $index bytesBuffer's length is ${bytesBuffer.length}")
+    log.warn(
+      s"bytesBuffer at index $index " +
+        s"digest ${calculateMD5(bytesBuffer).mkString("Array(", ", ", ")")}")
     Platform.copyMemory(
       bytesBuffer,
       Platform.BYTE_ARRAY_OFFSET,
@@ -104,18 +102,16 @@ case class UnsafeBytesBufferArray(
       return new Array[Byte](0)
     }
     val bytes = new Array[Byte](bytesBufferLengths(index))
-    log.debug(s"get bytesBuffer at index $index bytesBuffer length ${bytes.length}")
+    log.warn(s"get bytesBuffer at index $index bytesBuffer length ${bytes.length}")
     Platform.copyMemory(
       longArray.getBaseObject,
       longArray.getBaseOffset + bytesBufferOffset(index),
       bytes,
       Platform.BYTE_ARRAY_OFFSET,
       bytesBufferLengths(index))
-    if (log.isDebugEnabled) {
-      log.debug(
-        s"get bytesBuffer at index $index " +
-          s"digest ${calculateMD5(bytes).mkString("Array(", ", ", ")")}")
-    }
+    log.warn(
+      s"get bytesBuffer at index $index " +
+        s"digest ${calculateMD5(bytes).mkString("Array(", ", ", ")")}")
     bytes
   }

@@ -141,7 +137,7 @@ case class UnsafeBytesBufferArray(
   override def finalize(): Unit = {
     try {
       if (longArray != null) {
-        log.debug(s"BytesArrayInOffheap finalize $arraySize")
+        log.warn(s"BytesArrayInOffheap finalize $arraySize")
         freeArray(longArray)
         longArray = null
       }
diff --git a/backends-velox/src/main/scala/org/apache/spark/sql/execution/unsafe/UnsafeColumnarBuildSideRelation.scala b/backends-velox/src/main/scala/org/apache/spark/sql/execution/unsafe/UnsafeColumnarBuildSideRelation.scala
index 5d73ffec024aa..014642becff99 100644
--- a/backends-velox/src/main/scala/org/apache/spark/sql/execution/unsafe/UnsafeColumnarBuildSideRelation.scala
+++ b/backends-velox/src/main/scala/org/apache/spark/sql/execution/unsafe/UnsafeColumnarBuildSideRelation.scala
@@ -81,6 +81,7 @@ case class UnsafeColumnarBuildSideRelation(
     )
     val batchesSize = bytesBufferArray.length
     for (i <- 0 until batchesSize) {
+      log.warn(s"this $i--- ${bytesBufferArray(i).length}")
       batches.putBytesBuffer(i, bytesBufferArray(i))
     }
   }
@@ -94,6 +95,7 @@ case class UnsafeColumnarBuildSideRelation(
     out.writeLong(batches.totalBytes)
     for (i <- 0 until batches.arraySize) {
       val bytes = batches.getBytesBuffer(i)
+      log.warn(s"writeExternal index $i with length ${bytes.length}")
       out.write(bytes)
     }
   }
@@ -107,6 +109,7 @@ case class UnsafeColumnarBuildSideRelation(
     out.writeLong(batches.totalBytes)
     for (i <- 0 until batches.arraySize) {
       val bytes = batches.getBytesBuffer(i)
+      log.warn(s"write index $i with length ${bytes.length}")
       out.write(bytes)
     }
   }
@@ -141,6 +144,7 @@ case class UnsafeColumnarBuildSideRelation(
       val length = bytesBufferLengths(i)
       val tmpBuffer = new Array[Byte](length)
       in.read(tmpBuffer)
+      log.warn(s"readExternal $i--- $length")
       batches.putBytesBuffer(i, tmpBuffer)
     }
   }
@@ -167,6 +171,7 @@ case class UnsafeColumnarBuildSideRelation(
       val length = bytesBufferLengths(i)
       val tmpBuffer = new Array[Byte](length)
       in.read(tmpBuffer)
+      log.warn(s"read $i--- $length")
       batches.putBytesBuffer(i, tmpBuffer)
     }
   }
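The digest logging above relies on a calculateMD5 helper that the diff itself does not show. A minimal sketch of what it plausibly looks like, assuming it wraps java.security.MessageDigest and returns the raw 16-byte digest (the mkString("Array(", ", ", ")") calls imply an Array[Byte] result); the actual helper in the Gluten sources may differ:

import java.security.MessageDigest

object Md5Sketch {
  // Hypothetical stand-in for the calculateMD5 referenced in the diff:
  // returns the 16-byte MD5 digest of the given buffer.
  def calculateMD5(bytes: Array[Byte]): Array[Byte] =
    MessageDigest.getInstance("MD5").digest(bytes)
}

Logging the digest on both putBytesBuffer and getBytesBuffer lets the digests from the write path (writeExternal/write) be compared against those from the read path (readExternal/read), so any corruption of a buffer across serialization shows up as a digest mismatch at a specific index.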