diff --git a/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala b/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala
index f8395a691a..cfa1957695 100644
--- a/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala
+++ b/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala
@@ -163,6 +163,7 @@ object QueryPlanSerde extends Logging with CometExprShim {
     classOf[BitLength] -> CometScalarFunction("bit_length"),
     classOf[Chr] -> CometScalarFunction("char"),
+    classOf[Concat] -> CometScalarFunction("concat"),
     classOf[ConcatWs] -> CometScalarFunction("concat_ws"),
     classOf[Contains] -> CometScalarFunction("contains"),
     classOf[EndsWith] -> CometScalarFunction("ends_with"),
     classOf[InitCap] -> CometInitCap,
diff --git a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
index 9085c0fa29..ea7545382f 100644
--- a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
@@ -3204,4 +3204,16 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
     }
   }
 
+  test("test concat function - strings") {
+    withTable("t1") {
+      // c5 is a NULL string column so the test covers Spark's null-propagation
+      // semantics for concat (any NULL argument makes the result NULL).
+      sql(
+        "create table t1 using parquet as select uuid() c1, uuid() c2, uuid() c3, uuid() c4, cast(null as string) c5 from range(10)")
+      checkSparkAnswerAndOperator("select concat(c1, c2) AS x FROM t1")
+      checkSparkAnswerAndOperator("select concat(c1, c2, c3) AS x FROM t1")
+      checkSparkAnswerAndOperator("select concat(c1, c2, c3, c5) AS x FROM t1")
+      // TODO: re-enable once nested concat calls are supported natively:
+      // checkSparkAnswerAndOperator("select concat(concat(c1, c2, c3), concat(c1, c3)) AS x FROM t1")
+    }
+  }
+
 }