From a0dc01428149cf39fa35118afefd0c536607669d Mon Sep 17 00:00:00 2001
From: sundy-li <543950155@qq.com>
Date: Fri, 13 Dec 2024 10:24:55 +0800
Subject: [PATCH 1/4] chore(query): fast ifnull typechecker

---
 .../sql/src/planner/semantic/type_check.rs    | 23 +++++++++++++++----
 1 file changed, 18 insertions(+), 5 deletions(-)

diff --git a/src/query/sql/src/planner/semantic/type_check.rs b/src/query/sql/src/planner/semantic/type_check.rs
index 0f5bedf86789d..4a8cdb6f94990 100644
--- a/src/query/sql/src/planner/semantic/type_check.rs
+++ b/src/query/sql/src/planner/semantic/type_check.rs
@@ -3073,11 +3073,24 @@ impl<'a> TypeChecker<'a> {
                     arg_x,
                 ]))
             }
-            ("ifnull" | "nvl", args) => {
-                // Rewrite ifnull(x, y) to coalesce(x, y)
-                // Rewrite nvl(x, y) to coalesce(x, y)
-                // nvl is essentially an alias for ifnull.
-                Some(self.resolve_function(span, "coalesce", vec![], args))
+            ("ifnull" | "nvl", &[arg_x, arg_y]) => {
+                if args.len() == 2 {
+                    // Rewrite ifnull(x, y) | nvl(x, y) to if(is_null(x), y, x)
+                    Some(self.resolve_function(span, "if", vec![], &[
+                        &Expr::IsNull {
+                            span,
+                            expr: Box::new(arg_x.clone()),
+                            not: false,
+                        },
+                        arg_y,
+                        arg_x,
+                    ]))
+                } else {
+                    // Rewrite ifnull(args) to coalesce(x, y)
+                    // Rewrite nvl(args) to coalesce(args)
+                    // nvl is essentially an alias for ifnull.
+                    Some(self.resolve_function(span, "coalesce", vec![], args))
+                }
             }
             ("nvl2", &[arg_x, arg_y, arg_z]) => {
                 // Rewrite nvl2(x, y, z) to if(is_not_null(x), y, z)

From 968da990ce3fd71575a3781c9ff23b5fff02ed32 Mon Sep 17 00:00:00 2001
From: sundy-li <543950155@qq.com>
Date: Fri, 13 Dec 2024 11:05:53 +0800
Subject: [PATCH 2/4] chore(query): revert flight ipc

---
 .../io/ipc/read/array/fixed_size_list.rs      |  2 -
 .../arrow/src/arrow/io/ipc/read/array/list.rs |  2 -
 .../arrow/src/arrow/io/ipc/read/array/map.rs  |  2 -
 .../arrow/src/arrow/io/ipc/read/array/mod.rs  |  3 -
 .../src/arrow/io/ipc/read/array/struct_.rs    |  2 -
 .../src/arrow/io/ipc/read/array/union.rs      |  2 -
 .../arrow/src/arrow/io/ipc/read/common.rs     |  8 --
 .../src/arrow/io/ipc/read/deserialize.rs      | 34 +--------
 .../arrow/src/arrow/io/ipc/read/read_basic.rs | 76 -------------------
 .../arrow/src/arrow/io/ipc/read/schema.rs     |  4 +-
 .../arrow/src/arrow/io/ipc/write/common.rs    | 59 +-------------
 .../arrow/src/arrow/io/ipc/write/mod.rs       |  2 +
 .../arrow/src/arrow/io/ipc/write/schema.rs    |  6 +-
 .../arrow/src/arrow/io/ipc/write/serialize.rs | 54 +------------
 14 files changed, 12 insertions(+), 244 deletions(-)

diff --git a/src/common/arrow/src/arrow/io/ipc/read/array/fixed_size_list.rs b/src/common/arrow/src/arrow/io/ipc/read/array/fixed_size_list.rs
index 3650f6a811c39..ee7c48b5d4d3a 100644
--- a/src/common/arrow/src/arrow/io/ipc/read/array/fixed_size_list.rs
+++ b/src/common/arrow/src/arrow/io/ipc/read/array/fixed_size_list.rs
@@ -34,7 +34,6 @@ use crate::arrow::error::Result;
 #[allow(clippy::too_many_arguments)]
 pub fn read_fixed_size_list<R: Read + Seek>(
     field_nodes: &mut VecDeque<Node>,
-    variadic_buffer_counts: &mut VecDeque<usize>,
     data_type: DataType,
     ipc_field: &IpcField,
     buffers: &mut VecDeque<IpcBuffer>,
@@ -70,7 +69,6 @@ pub fn read_fixed_size_list<R: Read + Seek>(

     let values = read(
         field_nodes,
-        variadic_buffer_counts,
         field,
         &ipc_field.fields[0],
         buffers,
diff --git a/src/common/arrow/src/arrow/io/ipc/read/array/list.rs b/src/common/arrow/src/arrow/io/ipc/read/array/list.rs
index d3a931cf7df26..089e8323df0b5 100644
--- a/src/common/arrow/src/arrow/io/ipc/read/array/list.rs
+++ b/src/common/arrow/src/arrow/io/ipc/read/array/list.rs
@@ -38,7 +38,6 @@ use crate::arrow::offset::Offset;
 #[allow(clippy::too_many_arguments)]
 pub fn read_list<O: Offset, R: Read + Seek>(
     field_nodes: &mut VecDeque<Node>,
-    variadic_buffer_counts: &mut VecDeque<usize>,
     data_type: DataType,
     ipc_field: &IpcField,
     buffers: &mut VecDeque<IpcBuffer>,
@@ -95,7 +94,6 @@ where

     let values = read(
         field_nodes,
-        variadic_buffer_counts,
         field,
         &ipc_field.fields[0],
         buffers,
diff --git a/src/common/arrow/src/arrow/io/ipc/read/array/map.rs b/src/common/arrow/src/arrow/io/ipc/read/array/map.rs
index 9c9576455a876..2335d105cbfe2 100644
--- a/src/common/arrow/src/arrow/io/ipc/read/array/map.rs
+++ b/src/common/arrow/src/arrow/io/ipc/read/array/map.rs
@@ -36,7 +36,6 @@ use crate::arrow::error::Result;
 #[allow(clippy::too_many_arguments)]
 pub fn read_map<R: Read + Seek>(
     field_nodes: &mut VecDeque<Node>,
-    variadic_buffer_counts: &mut VecDeque<usize>,
     data_type: DataType,
     ipc_field: &IpcField,
     buffers: &mut VecDeque<IpcBuffer>,
@@ -90,7 +89,6 @@ pub fn read_map<R: Read + Seek>(

     let field = read(
         field_nodes,
-        variadic_buffer_counts,
         field,
         &ipc_field.fields[0],
         buffers,
diff --git a/src/common/arrow/src/arrow/io/ipc/read/array/mod.rs b/src/common/arrow/src/arrow/io/ipc/read/array/mod.rs
index fbef1718964b0..4d0252e3cffca 100644
--- a/src/common/arrow/src/arrow/io/ipc/read/array/mod.rs
+++ b/src/common/arrow/src/arrow/io/ipc/read/array/mod.rs
@@ -35,8 +35,5 @@ mod dictionary;
 pub use dictionary::*;
 mod union;
 pub use union::*;
-mod binview;
-pub use binview::*;
 mod map;
-
 pub use map::*;
diff --git a/src/common/arrow/src/arrow/io/ipc/read/array/struct_.rs b/src/common/arrow/src/arrow/io/ipc/read/array/struct_.rs
index d4e7fcf702a28..55381f0ae5a0b 100644
--- a/src/common/arrow/src/arrow/io/ipc/read/array/struct_.rs
+++ b/src/common/arrow/src/arrow/io/ipc/read/array/struct_.rs
@@ -34,7 +34,6 @@ use crate::arrow::error::Result;
 #[allow(clippy::too_many_arguments)]
 pub fn read_struct<R: Read + Seek>(
     field_nodes: &mut VecDeque<Node>,
-    variadic_buffer_counts: &mut VecDeque<usize>,
     data_type: DataType,
     ipc_field: &IpcField,
     buffers: &mut VecDeque<IpcBuffer>,
@@ -72,7 +71,6 @@ pub fn read_struct<R: Read + Seek>(
         .map(|(field, ipc_field)| {
             read(
                 field_nodes,
-                variadic_buffer_counts,
                 field,
                 ipc_field,
                 buffers,
diff --git a/src/common/arrow/src/arrow/io/ipc/read/array/union.rs b/src/common/arrow/src/arrow/io/ipc/read/array/union.rs
index f4e0ea8ea1733..271b4b2a96223 100644
--- a/src/common/arrow/src/arrow/io/ipc/read/array/union.rs
+++ b/src/common/arrow/src/arrow/io/ipc/read/array/union.rs
@@ -36,7 +36,6 @@ use crate::arrow::error::Result;
 #[allow(clippy::too_many_arguments)]
 pub fn read_union<R: Read + Seek>(
     field_nodes: &mut VecDeque<Node>,
-    variadic_buffer_counts: &mut VecDeque<usize>,
     data_type: DataType,
     ipc_field: &IpcField,
     buffers: &mut VecDeque<IpcBuffer>,
@@ -103,7 +102,6 @@ pub fn read_union<R: Read + Seek>(
         .map(|(field, ipc_field)| {
             read(
                 field_nodes,
-                variadic_buffer_counts,
                 field,
                 ipc_field,
                 buffers,
diff --git a/src/common/arrow/src/arrow/io/ipc/read/common.rs b/src/common/arrow/src/arrow/io/ipc/read/common.rs
index 54fa59ca96c58..46affe9034013 100644
--- a/src/common/arrow/src/arrow/io/ipc/read/common.rs
+++ b/src/common/arrow/src/arrow/io/ipc/read/common.rs
@@ -53,7 +53,6 @@ impl<'a, A, I: Iterator<Item = A>> ProjectionIter<'a, A, I> {
     /// # Panics
     /// iff `projection` is empty
    pub fn new(projection: &'a [usize], iter: I) -> Self {
-        assert!(!projection.is_empty(), "projection cannot be empty");
         Self {
             projection: &projection[1..],
             iter,
@@ -114,11 +113,6 @@ pub fn read_record_batch<R: Read + Seek>(
         .buffers()
         .map_err(|err| Error::from(OutOfSpecKind::InvalidFlatbufferBuffers(err)))?
         .ok_or_else(|| Error::from(OutOfSpecKind::MissingMessageBuffers))?;
-    let mut variadic_buffer_counts = batch
-        .variadic_buffer_counts()
-        .map_err(|err| Error::from(OutOfSpecKind::InvalidFlatbufferRecordBatches(err)))?
-        .map(|v| v.iter().map(|v| v as usize).collect::<VecDeque<usize>>())
-        .unwrap_or_else(VecDeque::new);
     let mut buffers: VecDeque<arrow_format::ipc::BufferRef> = buffers.iter().collect();

     // check that the sum of the sizes of all buffers is <= than the size of the file
@@ -153,7 +147,6 @@ pub fn read_record_batch<R: Read + Seek>(
             .map(|maybe_field| match maybe_field {
                 ProjectionResult::Selected((field, ipc_field)) => Ok(Some(read(
                     &mut field_nodes,
-                    &mut variadic_buffer_counts,
                     field,
                     ipc_field,
                     &mut buffers,
@@ -182,7 +175,6 @@ pub fn read_record_batch<R: Read + Seek>(
             .map(|(field, ipc_field)| {
                 read(
                     &mut field_nodes,
-                    &mut variadic_buffer_counts,
                     field,
                     ipc_field,
                     &mut buffers,
diff --git a/src/common/arrow/src/arrow/io/ipc/read/deserialize.rs b/src/common/arrow/src/arrow/io/ipc/read/deserialize.rs
index 18038c0b9d751..0905569248550 100644
--- a/src/common/arrow/src/arrow/io/ipc/read/deserialize.rs
+++ b/src/common/arrow/src/arrow/io/ipc/read/deserialize.rs
@@ -34,7 +34,6 @@ use crate::arrow::io::ipc::IpcField;
 #[allow(clippy::too_many_arguments)]
 pub fn read<R: Read + Seek>(
     field_nodes: &mut VecDeque<Node>,
-    variadic_buffer_counts: &mut VecDeque<usize>,
     field: &Field,
     ipc_field: &IpcField,
     buffers: &mut VecDeque<IpcBuffer>,
@@ -140,7 +139,6 @@ pub fn read<R: Read + Seek>(
         .map(|x| x.boxed()),
         List => read_list::<i32, _>(
             field_nodes,
-            variadic_buffer_counts,
             data_type,
             ipc_field,
             buffers,
@@ -156,7 +154,6 @@ pub fn read<R: Read + Seek>(
         .map(|x| x.boxed()),
         LargeList => read_list::<i64, _>(
             field_nodes,
-            variadic_buffer_counts,
             data_type,
             ipc_field,
             buffers,
@@ -172,7 +169,6 @@ pub fn read<R: Read + Seek>(
         .map(|x| x.boxed()),
         FixedSizeList => read_fixed_size_list(
             field_nodes,
-            variadic_buffer_counts,
             data_type,
             ipc_field,
             buffers,
@@ -188,7 +184,6 @@ pub fn read<R: Read + Seek>(
         .map(|x| x.boxed()),
         Struct => read_struct(
             field_nodes,
-            variadic_buffer_counts,
             data_type,
             ipc_field,
             buffers,
@@ -222,7 +217,6 @@ pub fn read<R: Read + Seek>(
         }
         Union => read_union(
             field_nodes,
-            variadic_buffer_counts,
             data_type,
             ipc_field,
             buffers,
@@ -238,7 +232,6 @@ pub fn read<R: Read + Seek>(
         .map(|x| x.boxed()),
         Map => read_map(
             field_nodes,
-            variadic_buffer_counts,
             data_type,
             ipc_field,
             buffers,
@@ -252,30 +245,7 @@ pub fn read<R: Read + Seek>(
             scratch,
         )
         .map(|x| x.boxed()),
-        Utf8View => read_binview::<str, _>(
-            field_nodes,
-            variadic_buffer_counts,
-            data_type,
-            buffers,
-            reader,
-            block_offset,
-            is_little_endian,
-            compression,
-            limit,
-            scratch,
-        ),
-        BinaryView => read_binview::<[u8], _>(
-            field_nodes,
-            variadic_buffer_counts,
-            data_type,
-            buffers,
-            reader,
-            block_offset,
-            is_little_endian,
-            compression,
-            limit,
-            scratch,
-        ),
+        BinaryView | Utf8View => unimplemented!("BinaryView and Utf8View are not supported"),
     }
 }

@@ -299,6 +269,6 @@ pub fn skip(
         Dictionary(_) => skip_dictionary(field_nodes, buffers),
         Union => skip_union(field_nodes, data_type, buffers),
         Map => skip_map(field_nodes, data_type, buffers),
-        BinaryView | Utf8View => todo!(),
+        BinaryView | Utf8View => unimplemented!("BinaryView and Utf8View are not supported"),
     }
 }
diff --git a/src/common/arrow/src/arrow/io/ipc/read/read_basic.rs b/src/common/arrow/src/arrow/io/ipc/read/read_basic.rs
index 1cf0599687472..e3d82afa52ed8 100644
--- a/src/common/arrow/src/arrow/io/ipc/read/read_basic.rs
+++ b/src/common/arrow/src/arrow/io/ipc/read/read_basic.rs
@@ -65,23 +65,6 @@ fn read_swapped<T: NativeType, R: Read + Seek>(
     Ok(())
 }

-fn read_uncompressed_bytes<R: Read + Seek>(
-    reader: &mut R,
-    buffer_length: usize,
-    is_little_endian: bool,
-) -> Result<Vec<u8>> {
-    if is_native_little_endian() == is_little_endian {
-        let mut buffer = Vec::with_capacity(buffer_length);
-        let _ = reader
-            .take(buffer_length as u64)
-            .read_to_end(&mut buffer)
-            .unwrap();
-        Ok(buffer)
-    } else {
-        unreachable!()
-    }
-}
-
 fn read_uncompressed_buffer<T: NativeType, R: Read + Seek>(
     reader: &mut R,
     buffer_length: usize,
@@ -167,65 +150,6 @@ fn read_compressed_buffer<T: NativeType, R: Read + Seek>(
     Ok(buffer)
 }

-fn read_compressed_bytes<R: Read + Seek>(
-    reader: &mut R,
-    buffer_length: usize,
-    is_little_endian: bool,
-    compression: Compression,
-    scratch: &mut Vec<u8>,
-) -> Result<Vec<u8>> {
-    read_compressed_buffer::<u8, _>(
-        reader,
-        buffer_length,
-        buffer_length,
-        is_little_endian,
-        compression,
-        scratch,
-    )
-}
-
-pub fn read_bytes<R: Read + Seek>(
-    buf: &mut VecDeque<IpcBuffer>,
-    reader: &mut R,
-    block_offset: u64,
-    is_little_endian: bool,
-    compression: Option<Compression>,
-    scratch: &mut Vec<u8>,
-) -> Result<Buffer<u8>> {
-    let buf = buf
-        .pop_front()
-        .ok_or_else(|| Error::oos(format!("out-of-spec: {:?}", OutOfSpecKind::ExpectedBuffer)))?;
-
-    let offset: u64 = buf.offset().try_into().map_err(|_| {
-        Error::oos(format!(
-            "out-of-spec: {:?}",
-            OutOfSpecKind::NegativeFooterLength
-        ))
-    })?;
-
-    let buffer_length: usize = buf.length().try_into().map_err(|_| {
-        Error::oos(format!(
-            "out-of-spec: {:?}",
-            OutOfSpecKind::NegativeFooterLength
-        ))
-    })?;
-
-    reader.seek(SeekFrom::Start(block_offset + offset))?;
-
-    if let Some(compression) = compression {
-        Ok(read_compressed_bytes(
-            reader,
-            buffer_length,
-            is_little_endian,
-            compression,
-            scratch,
-        )?
-        .into())
-    } else {
-        Ok(read_uncompressed_bytes(reader, buffer_length, is_little_endian)?.into())
-    }
-}
-
 pub fn read_buffer<T: NativeType, R: Read + Seek>(
     buf: &mut VecDeque<IpcBuffer>,
     length: usize, // in slots
diff --git a/src/common/arrow/src/arrow/io/ipc/read/schema.rs b/src/common/arrow/src/arrow/io/ipc/read/schema.rs
index 690cca728ba4f..b2e032aaab8b3 100644
--- a/src/common/arrow/src/arrow/io/ipc/read/schema.rs
+++ b/src/common/arrow/src/arrow/io/ipc/read/schema.rs
@@ -292,8 +292,6 @@ fn get_data_type(
         LargeBinary(_) => (DataType::LargeBinary, IpcField::default()),
         Utf8(_) => (DataType::Utf8, IpcField::default()),
         LargeUtf8(_) => (DataType::LargeUtf8, IpcField::default()),
-        BinaryView(_) => (DataType::BinaryView, IpcField::default()),
-        Utf8View(_) => (DataType::Utf8View, IpcField::default()),
         FixedSizeBinary(fixed) => (
             DataType::FixedSizeBinary(
                 fixed
@@ -366,7 +364,7 @@ fn get_data_type(
         Struct(_) => deserialize_struct(field)?,
         Union(union_) => deserialize_union(union_, field)?,
         Map(map) => deserialize_map(map, field)?,
-        RunEndEncoded(_) | LargeListView(_) | ListView(_) => unimplemented!(),
+        _other => unimplemented!("BinaryView and Utf8View are not supported"),
     })
 }
diff --git a/src/common/arrow/src/arrow/io/ipc/write/common.rs b/src/common/arrow/src/arrow/io/ipc/write/common.rs
index fe55287c6c307..8d0e82337aa9c 100644
--- a/src/common/arrow/src/arrow/io/ipc/write/common.rs
+++ b/src/common/arrow/src/arrow/io/ipc/write/common.rs
@@ -56,7 +56,7 @@ fn encode_dictionary(
     use PhysicalType::*;
     match array.data_type().to_physical_type() {
         Utf8 | LargeUtf8 | Binary | LargeBinary | Primitive(_) | Boolean | Null
-        | FixedSizeBinary | BinaryView | Utf8View => Ok(()),
+        | FixedSizeBinary => Ok(()),
         Dictionary(key_type) => match_integer_type!(key_type, |$T| {
             let dict_id = field.dictionary_id
                 .ok_or_else(|| Error::InvalidArgumentError("Dictionaries must have an associated id".to_string()))?;
@@ -185,6 +185,7 @@ fn encode_dictionary(
                 encoded_dictionaries,
             )
         }
+        BinaryView | Utf8View => unimplemented!("BinaryView and Utf8View are not supported"),
     }
 }

@@ -247,41 +248,6 @@ fn serialize_compression(
     }
 }

-fn set_variadic_buffer_counts(counts: &mut Vec<i64>, array: &dyn Array) {
-    match array.data_type() {
-        DataType::Utf8View => {
-            let array = array.as_any().downcast_ref::<Utf8ViewArray>().unwrap();
-            counts.push(array.data_buffers().len() as i64);
-        }
-        DataType::BinaryView => {
-            let array = array.as_any().downcast_ref::<BinaryViewArray>().unwrap();
-            counts.push(array.data_buffers().len() as i64);
-        }
-        DataType::Struct(_) => {
-            let array = array.as_any().downcast_ref::<StructArray>().unwrap();
-            for array in array.values() {
-                set_variadic_buffer_counts(counts, array.as_ref())
-            }
-        }
-        DataType::LargeList(_) => {
-            let array = array.as_any().downcast_ref::<ListArray<i64>>().unwrap();
-            set_variadic_buffer_counts(counts, array.values().as_ref())
-        }
-        DataType::FixedSizeList(_, _) => {
-            let array = array.as_any().downcast_ref::<FixedSizeListArray>().unwrap();
-            set_variadic_buffer_counts(counts, array.values().as_ref())
-        }
-        DataType::Dictionary(_, _, _) => {
-            let array = array
-                .as_any()
-                .downcast_ref::<DictionaryArray<u32>>()
-                .unwrap();
-            set_variadic_buffer_counts(counts, array.values().as_ref())
-        }
-        _ => (),
-    }
-}
-
 /// Write [`Chunk`] into two sets of bytes, one for the header (ipc::Schema::Message) and the
 /// other for the batch's data
 fn chunk_to_bytes_amortized(
@@ -295,9 +261,7 @@ fn chunk_to_bytes_amortized(
     arrow_data.clear();

     let mut offset = 0;
-    let mut variadic_buffer_counts = vec![];
     for array in chunk.arrays() {
-        set_variadic_buffer_counts(&mut variadic_buffer_counts, array.as_ref());
         write(
             array.as_ref(),
             &mut buffers,
@@ -309,12 +273,6 @@ fn chunk_to_bytes_amortized(
         )
     }

-    let variadic_buffer_counts = if variadic_buffer_counts.is_empty() {
-        None
-    } else {
-        Some(variadic_buffer_counts)
-    };
-
     let compression = serialize_compression(options.compression);

     let message = arrow_format::ipc::Message {
@@ -325,7 +283,7 @@ fn chunk_to_bytes_amortized(
                 nodes: Some(nodes),
                 buffers: Some(buffers),
                 compression,
-                variadic_buffer_counts,
+                variadic_buffer_counts: None,
             },
         ))),
         body_length: arrow_data.len() as i64,
@@ -350,15 +308,6 @@ fn dictionary_batch_to_bytes(
     let mut buffers: Vec<arrow_format::ipc::Buffer> = vec![];
     let mut arrow_data: Vec<u8> = vec![];

-    let mut variadic_buffer_counts = vec![];
-    set_variadic_buffer_counts(&mut variadic_buffer_counts, array.values().as_ref());
-
-    let variadic_buffer_counts = if variadic_buffer_counts.is_empty() {
-        None
-    } else {
-        Some(variadic_buffer_counts)
-    };
-
     let length = write_dictionary(
         array,
         &mut buffers,
@@ -382,7 +331,7 @@ fn dictionary_batch_to_bytes(
                     nodes: Some(nodes),
                     buffers: Some(buffers),
                     compression,
-                    variadic_buffer_counts,
+                    variadic_buffer_counts: None,
                 })),
                 is_delta: false,
             },
diff --git a/src/common/arrow/src/arrow/io/ipc/write/mod.rs b/src/common/arrow/src/arrow/io/ipc/write/mod.rs
index e6d2dfb03588d..42097c1f87029 100644
--- a/src/common/arrow/src/arrow/io/ipc/write/mod.rs
+++ b/src/common/arrow/src/arrow/io/ipc/write/mod.rs
@@ -31,6 +31,8 @@ pub use writer::FileWriter;

 pub(crate) mod common_sync;

+mod common_async;
+
 use super::IpcField;
 use crate::arrow::datatypes::DataType;
 use crate::arrow::datatypes::Field;
diff --git a/src/common/arrow/src/arrow/io/ipc/write/schema.rs b/src/common/arrow/src/arrow/io/ipc/write/schema.rs
index 4f2740843f6c5..0389e859c8a46 100644
--- a/src/common/arrow/src/arrow/io/ipc/write/schema.rs
+++ b/src/common/arrow/src/arrow/io/ipc/write/schema.rs
@@ -274,8 +274,7 @@ fn serialize_type(data_type: &DataType) -> arrow_format::ipc::Type {
         Struct(_) => ipc::Type::Struct(Box::new(ipc::Struct {})),
         Dictionary(_, v, _) => serialize_type(v),
         Extension(_, v, _) => serialize_type(v),
-        Utf8View => ipc::Type::Utf8View(Box::new(ipc::Utf8View {})),
-        BinaryView => ipc::Type::BinaryView(Box::new(ipc::BinaryView {})),
+        BinaryView | Utf8View => unimplemented!("BinaryView and Utf8View are not supported"),
     }
 }

@@ -307,8 +306,6 @@ fn serialize_children(data_type: &DataType, ipc_field: &IpcField) -> Vec
         | Utf8 | LargeUtf8 | Decimal(_, _) | Decimal256(_, _) => vec![],
         FixedSizeList(inner, _) | LargeList(inner) | List(inner) | Map(inner, _) => {
@@ -321,6 +318,7 @@ fn serialize_children(data_type: &DataType, ipc_field: &IpcField) -> Vec
             .collect(),
         Dictionary(_, inner, _) => serialize_children(inner, ipc_field),
         Extension(_, inner, _) => serialize_children(inner, ipc_field),
+        BinaryView | Utf8View => unimplemented!("BinaryView and Utf8View are not supported"),
     }
 }
diff --git a/src/common/arrow/src/arrow/io/ipc/write/serialize.rs b/src/common/arrow/src/arrow/io/ipc/write/serialize.rs
index 21879ff2c8cd6..e2fdb6ee5a5ff 100644
--- a/src/common/arrow/src/arrow/io/ipc/write/serialize.rs
+++ b/src/common/arrow/src/arrow/io/ipc/write/serialize.rs
@@ -441,43 +441,6 @@ pub(super) fn write_dictionary<K: DictionaryKey>(
     }
 }

-#[allow(clippy::too_many_arguments)]
-pub(super) fn write_binview<T: ViewType + ?Sized>(
-    array: &BinaryViewArrayGeneric<T>,
-    buffers: &mut Vec<ipc::Buffer>,
-    arrow_data: &mut Vec<u8>,
-    offset: &mut i64,
-    is_little_endian: bool,
-    compression: Option<Compression>,
-) {
-    let array = if array.is_sliced() {
-        array.clone().maybe_gc()
-    } else {
-        array.clone()
-    };
-    write_bitmap(
-        array.validity(),
-        Array::len(&array),
-        buffers,
-        arrow_data,
-        offset,
-        compression,
-    );
-
-    write_buffer(
-        array.views(),
-        buffers,
-        arrow_data,
-        offset,
-        is_little_endian,
-        compression,
-    );
-
-    for data in array.data_buffers().as_ref() {
-        write_bytes(data, buffers, arrow_data, offset, compression);
-    }
-}
-
 /// Writes an [`Array`] to `arrow_data`
 pub fn write(
     array: &dyn Array,
@@ -617,22 +580,7 @@ pub fn write(
                 compression,
             );
         }
-        Utf8View => write_binview(
-            array.as_any().downcast_ref::<Utf8ViewArray>().unwrap(),
-            buffers,
-            arrow_data,
-            offset,
-            is_little_endian,
-            compression,
-        ),
-        BinaryView => write_binview(
-            array.as_any().downcast_ref::<BinaryViewArray>().unwrap(),
-            buffers,
-            arrow_data,
-            offset,
-            is_little_endian,
-            compression,
-        ),
+        BinaryView | Utf8View => unimplemented!("BinaryView and Utf8View are not supported"),
     }
 }

From ad7d4938277333ca2085d95f8aa0b0d5c76c66ad Mon Sep 17 00:00:00 2001
From: sundy-li <543950155@qq.com>
Date: Fri, 13 Dec 2024 14:14:50 +0800
Subject: [PATCH 3/4] fix

---
 src/query/sql/src/planner/semantic/type_check.rs | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/query/sql/src/planner/semantic/type_check.rs b/src/query/sql/src/planner/semantic/type_check.rs
index 4a8cdb6f94990..3b99f521844b3 100644
--- a/src/query/sql/src/planner/semantic/type_check.rs
+++ b/src/query/sql/src/planner/semantic/type_check.rs
@@ -3073,17 +3073,17 @@ impl<'a> TypeChecker<'a> {
                     arg_x,
                 ]))
             }
-            ("ifnull" | "nvl", &[arg_x, arg_y]) => {
+            ("ifnull" | "nvl", args) => {
                 if args.len() == 2 {
                     // Rewrite ifnull(x, y) | nvl(x, y) to if(is_null(x), y, x)
                     Some(self.resolve_function(span, "if", vec![], &[
                         &Expr::IsNull {
                             span,
-                            expr: Box::new(arg_x.clone()),
+                            expr: Box::new(args[0].clone()),
                             not: false,
                         },
-                        arg_y,
-                        arg_x,
+                        &args[1],
+                        &args[0],
                     ]))
                 } else {
                     // Rewrite ifnull(args) to coalesce(x, y)

From 525ef4858959289f0edd2eec4c4b7414a1d38c02 Mon Sep 17 00:00:00 2001
From: sundy-li <543950155@qq.com>
Date: Fri, 13 Dec 2024 14:40:29 +0800
Subject: [PATCH 4/4] fix

---
 src/query/sql/src/planner/semantic/type_check.rs | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/query/sql/src/planner/semantic/type_check.rs b/src/query/sql/src/planner/semantic/type_check.rs
index 3b99f521844b3..9baafae54d6c3 100644
--- a/src/query/sql/src/planner/semantic/type_check.rs
+++ b/src/query/sql/src/planner/semantic/type_check.rs
@@ -3082,8 +3082,8 @@ impl<'a> TypeChecker<'a> {
                             expr: Box::new(args[0].clone()),
                             not: false,
                         },
-                        &args[1],
-                        &args[0],
+                        args[1],
+                        args[0],
                     ]))
                 } else {
                     // Rewrite ifnull(args) to coalesce(x, y)