From 32b2c4e0c8dfc4fd720f0345962d571d6361abf9 Mon Sep 17 00:00:00 2001 From: zouyunhe Date: Mon, 13 Jan 2025 20:36:42 +0800 Subject: [PATCH] modify test --- .../hive/GlutenClickHouseHiveTableSuite.scala | 24 ++++++++++++------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/backends-clickhouse/src/test/scala/org/apache/gluten/execution/hive/GlutenClickHouseHiveTableSuite.scala b/backends-clickhouse/src/test/scala/org/apache/gluten/execution/hive/GlutenClickHouseHiveTableSuite.scala index 097ba0b637ef..e0f9fa7512e0 100644 --- a/backends-clickhouse/src/test/scala/org/apache/gluten/execution/hive/GlutenClickHouseHiveTableSuite.scala +++ b/backends-clickhouse/src/test/scala/org/apache/gluten/execution/hive/GlutenClickHouseHiveTableSuite.scala @@ -1651,21 +1651,25 @@ class GlutenClickHouseHiveTableSuite test("GLUTEN-8516: Optimize and filter by move equal conditions ahead") { - def checkConditionsMoveAhead(x: DataFrame): Boolean = { + import org.apache.spark.sql.execution.SparkPlan + + def checkConditionsMoveAhead(plan: SparkPlan): Boolean = { var ruleEffected = false - val plan = x.queryExecution.sparkPlan - plan.children.foreach { + plan match { case f: FilterExecTransformer if f.condition.isInstanceOf[And] => val cond = f.condition.asInstanceOf[And] cond.left match { case e: EqualTo if (e.left.isInstanceOf[Attribute] && e.right.isInstanceOf[Literal]) => ruleEffected = true - case en: EqualNullSafe - if (en.left.isInstanceOf[Attribute] && en.right.isInstanceOf[Literal]) => + case e: EqualNullSafe + if (e.left.isInstanceOf[Attribute] && e.right.isInstanceOf[Literal]) => ruleEffected = true case _ => } - case _ => + case p => + if (!ruleEffected) { + ruleEffected = p.children.exists(c => checkConditionsMoveAhead(c)) + } } ruleEffected } @@ -1673,12 +1677,14 @@ class GlutenClickHouseHiveTableSuite val insert_data_sql = "insert into test_tbl_8516 values(1, 2), (2, 3), (3, 4)" val query_sql_1 = "select count(1) from test_tbl_8516 where cast(b as string) != '' and a = 1" val query_sql_2 = - "select count(1) from test_tbl_8516 where cast(b as string) != '' and a is null" + "select count(1) from test_tbl_8516 where cast(b as string) != '' and a <=> 1" spark.sql(create_table_sql) spark.sql(insert_data_sql) withSQLConf(("spark.gluten.sql.moveAndFilterEqualConditionsAhead.enabled", "true")) { - runQueryAndCompare(query_sql_1)(x => checkConditionsMoveAhead(x)) - runQueryAndCompare(query_sql_2)(x => checkConditionsMoveAhead(x)) + runQueryAndCompare(query_sql_1)( + x => assert(checkConditionsMoveAhead(x.queryExecution.executedPlan))) + runQueryAndCompare(query_sql_2)( + x => assert(checkConditionsMoveAhead(x.queryExecution.executedPlan))) } spark.sql("drop table test_tbl_8516") }