Skip to content

Commit

Permalink
[GLUTEN-1632][CH]Daily Update Clickhouse Version (20240705) (#6338)
Browse files Browse the repository at this point in the history
* [GLUTEN-1632][CH]Daily Update Clickhouse Version (20240705)

* Fix build due to ClickHouse/ClickHouse#61601

---------

Co-authored-by: kyligence-git <[email protected]>
Co-authored-by: Chang Chen <[email protected]>
  • Loading branch information
3 people authored Jul 5, 2024
1 parent f8e6b75 commit 4a674e5
Show file tree
Hide file tree
Showing 6 changed files with 13 additions and 13 deletions.
4 changes: 2 additions & 2 deletions cpp-ch/clickhouse.version
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
CH_ORG=Kyligence
CH_BRANCH=rebase_ch/20240704
CH_COMMIT=f617655ccea
CH_BRANCH=rebase_ch/20240705
CH_COMMIT=531a87ed802

6 changes: 3 additions & 3 deletions cpp-ch/local-engine/Operator/DefaultHashAggregateResult.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -116,18 +116,18 @@ class DefaultHashAggrgateResultTransform : public DB::IProcessor
has_input = true;
output_chunk = DB::Chunk(result_cols, 1);
auto info = std::make_shared<DB::AggregatedChunkInfo>();
output_chunk.setChunkInfo(info);
output_chunk.getChunkInfos().add(std::move(info));
return Status::Ready;
}

input.setNeeded();
if (input.hasData())
{
output_chunk = input.pull(true);
if (!output_chunk.hasChunkInfo())
if (output_chunk.getChunkInfos().empty())
{
auto info = std::make_shared<DB::AggregatedChunkInfo>();
output_chunk.setChunkInfo(info);
output_chunk.getChunkInfos().add(std::move(info));
}
has_input = true;
return Status::Ready;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -121,12 +121,11 @@ void SparkMergeTreeWriter::write(const DB::Block & block)
checkAndMerge();
}

bool SparkMergeTreeWriter::chunkToPart(Chunk && chunk)
bool SparkMergeTreeWriter::chunkToPart(Chunk && plan_chunk)
{
if (chunk.hasChunkInfo())
if (Chunk result_chunk = DB::Squashing::squash(std::move(plan_chunk)))
{
Chunk squash_chunk = DB::Squashing::squash(std::move(chunk));
Block result = header.cloneWithColumns(squash_chunk.getColumns());
auto result = squashing->getHeader().cloneWithColumns(result_chunk.detachColumns());
return blockToPart(result);
}
return false;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ class SparkMergeTreeWriter
void saveMetadata();
void commitPartToRemoteStorageIfNeeded();
void finalizeMerge();
bool chunkToPart(Chunk && chunk);
bool chunkToPart(Chunk && plan_chunk);
bool blockToPart(Block & block);
bool useLocalStorage() const;

Expand Down
4 changes: 2 additions & 2 deletions cpp-ch/local-engine/Storages/SourceFromJavaIter.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -109,13 +109,13 @@ DB::Chunk SourceFromJavaIter::generate()
auto info = std::make_shared<DB::AggregatedChunkInfo>();
info->is_overflows = data->info.is_overflows;
info->bucket_num = data->info.bucket_num;
result.setChunkInfo(info);
result.getChunkInfos().add(std::move(info));
}
else
{
result = BlockUtil::buildRowCountChunk(rows);
auto info = std::make_shared<DB::AggregatedChunkInfo>();
result.setChunkInfo(info);
result.getChunkInfos().add(std::move(info));
}
}
return result;
Expand Down
3 changes: 2 additions & 1 deletion cpp-ch/local-engine/tests/gtest_parser.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,8 @@ TEST(LocalExecutor, StorageObjectStorageSink)

/// 2. Create Chunk
/// 3. consume
sink.consume(testChunk());
Chunk data = testChunk();
sink.consume(data);
sink.onFinish();
}

Expand Down

0 comments on commit 4a674e5

Please sign in to comment.