```
org.apache.kyuubi.KyuubiSQLException: Error operating ExecuteStatement: org.apache.spark.sql.AnalysisException: grouping expressions sequence is empty, and 't1.`re_no`' is not an aggregate function. Wrap '(CASE WHEN (count(DISTINCT t1.`vin_17`) < count(t1.`vin_17`)) THEN '是' ELSE '否' END AS `is_vin_17_duplicate`)' in windowing function(s) or wrap 't1.`re_no`' in first() (or first_value) if you don't care which value you get.;
CreateHiveTableAsSelectCommand [Database: orca01_dr_data, TableName: ads_rpt_mini_loyalty_customer_t_1124, InsertIntoHiveTable]
+- Distinct
   +- Project [re_no#339, customer_full_name#324, new_vin_17#359, brand_name#353, re_status#369, create_date#375, is_vin_17_duplicate#325, company_name_result#326, paid_amount#500, order_status#328]
      +- Join LeftOuter, (new_vin_17#359 = vin_17#329)
         :- SubqueryAlias t1
         :  +- SubqueryAlias repurchase
         :     +- Aggregate [re_no#339, concat(last_name#395, first_name#394) AS customer_full_name#324, new_vin_17#359, brand_name#353, re_status#369, create_date#375, CASE WHEN (count(distinct vin_17#343) < count(vin_17#343)) THEN 是 ELSE 否 END AS is_vin_17_duplicate#325, CASE WHEN (category#345 = 公司购车) THEN company_name#349 ELSE cast(null as string) END AS company_name_result#326]
         :        +- Filter ((brand_name#353 = MINI) AND (create_date#375 >= 2025-10-01))
         :           +- Join LeftOuter, (cop_id#341 = cop_id#392)
         :              :- SubqueryAlias t1
         :              :  +- SubqueryAlias spark_catalog.dwc.dwc_dim_com_membership2_bz_repurchase_full_t
         :              :     +- HiveTableRelation [`dwc`.`dwc_dim_com_membership2_bz_repurchase_full_t`, org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe, Data Cols: [_hoodie_commit_time#333, _hoodie_commit_seqno#334, _hoodie_record_key#335, _hoodie_partition_pat..., Partition Cols: [pday#384]]
         :              +- SubqueryAlias t2
         :                 +- SubqueryAlias spark_catalog.dwc.dwc_dim_cus_membership2_customer_full_t
         :                    +- HiveTableRelation [`dwc`.`dwc_dim_cus_membership2_customer_full_t`, org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe, Data Cols: [_hoodie_commit_time#385, _hoodie_commit_seqno#386, _hoodie_record_key#387, _hoodie_partition_pat..., Partition Cols: [pday#439]]
         +- SubqueryAlias t2
            +- SubqueryAlias ordercenter
               +- Project [paid_amount#500, status#522 AS order_status#328, vin_17#521 AS vin_17#329, concat(last_name#546, first_name#545) AS concat_name#330, name#544 AS customer_full_name#331]
                  +- Filter ((business_type#456 = NC) AND NOT (deleted#473 = 0))
                     +- Join LeftOuter, ((order_no#447 = order_no#572) AND (rk#327 = 1))
                        :- Join LeftOuter, (((order_no#447 = order_no#540) AND (cid#449 = cid#541)) AND ((type#543 = VEHICLE_OWNER) AND NOT (deleted#563 = 0)))
                        :  :- Join LeftOuter, (order_no#447 = order_no#518)
                        :  :  :- Join LeftOuter, (order_no#447 = order_no#494)
                        :  :  :  :- SubqueryAlias t1
                        :  :  :  :  +- SubqueryAlias spark_catalog.dwc.dwc_fact_sal_ordercenter_core_order_full_t
                        :  :  :  :     +- HiveTableRelation [`dwc`.`dwc_fact_sal_ordercenter_core_order_full_t`, org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe, Data Cols: [_hoodie_commit_time#440, _hoodie_commit_seqno#441, _hoodie_record_key#442, _hoodie_partition_pat..., Partition Cols: [pday#486]]
                        :  :  :  +- SubqueryAlias a1
                        :  :  :     +- SubqueryAlias spark_catalog.dwc.dwc_fact_sal_ordercenter_payment_full_t
                        :  :  :        +- HiveTableRelation [`dwc`.`dwc_fact_sal_ordercenter_payment_full_t`, org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe, Data Cols: [_hoodie_commit_time#487, _hoodie_commit_seqno#488, _hoodie_record_key#489, _hoodie_partition_pat..., Partition Cols: [pday#511]]
                        :  :  +- SubqueryAlias vin
                        :  :     +- SubqueryAlias spark_catalog.dwc.dwc_fact_sal_ordercenter_vehicle_fulfillment_full_t
                        :  :        +- HiveTableRelation [`dwc`.`dwc_fact_sal_ordercenter_vehicle_fulfillment_full_t`, org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe, Data Cols: [_hoodie_commit_time#512, _hoodie_commit_seqno#513, _hoodie_record_key#514, _hoodie_partition_pat..., Partition Cols: [pday#533]]
                        :  +- SubqueryAlias t4
                        :     +- SubqueryAlias spark_catalog.dwc.dwc_fact_sal_ordercenter_customer_full_t
                        :        +- HiveTableRelation [`dwc`.`dwc_fact_sal_ordercenter_customer_full_t`, org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe, Data Cols: [_hoodie_commit_time#534, _hoodie_commit_seqno#535, _hoodie_record_key#536, _hoodie_partition_pat..., Partition Cols: [pday#565]]
                        +- SubqueryAlias log1
                           +- Project [order_no#572, create_date#575, status#573, rk#327]
                              +- Project [order_no#572, create_date#575, status#573, rk#327, rk#327]
                                 +- Window [row_number() windowspecdefinition(order_no#572, create_date#575 ASC NULLS LAST, specifiedwindowframe(RowFrame, unboundedpreceding$(), currentrow$())) AS rk#327], [order_no#572], [create_date#575 ASC NULLS LAST]
                                    +- Project [order_no#572, create_date#575, status#573]
                                       +- Filter (type#574 = PAYMENT)
                                          +- SubqueryAlias spark_catalog.dwc.dwc_fact_com_ordercenter_core_order_log_full_t
                                             +- HiveTableRelation [`dwc`.`dwc_fact_com_ordercenter_core_order_log_full_t`, org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe, Data Cols: [_hoodie_commit_time#566, _hoodie_commit_seqno#567, _hoodie_record_key#568, _hoodie_partition_pat..., Partition Cols: [pday#580]]

    at org.apache.spark.sql.catalyst.analysis.CheckAnalysis.failAnalysis(CheckAnalysis.scala:50)
    at org.apache.spark.sql.catalyst.analysis.CheckAnalysis.failAnalysis$(CheckAnalysis.scala:49)
    at org.apache.spark.sql.catalyst.analysis.Analyzer.failAnalysis(Analyzer.scala:155)
    at org.apache.spark.sql.catalyst.analysis.CheckAnalysis.checkValidAggregateExpression$1(CheckAnalysis.scala:263)
    at org.apache.spark.sql.catalyst.analysis.CheckAnalysis.$anonfun$checkAnalysis$15(CheckAnalysis.scala:299)
    at org.apache.spark.sql.catalyst.analysis.CheckAnalysis.$anonfun$checkAnalysis$15$adapted(CheckAnalysis.scala:299)
    at scala.collection.immutable.List.foreach(List.scala:392)
    at org.apache.spark.sql.catalyst.analysis.CheckAnalysis.$anonfun$checkAnalysis$1(CheckAnalysis.scala:299)
    at org.apache.spark.sql.catalyst.analysis.CheckAnalysis.$anonfun$checkAnalysis$1$adapted(CheckAnalysis.scala:93)
    at org.apache.spark.sql.catalyst.trees.TreeNode.foreachUp(TreeNode.scala:183)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$foreachUp$1(TreeNode.scala:182)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$foreachUp$1$adapted(TreeNode.scala:182)
    at scala.collection.immutable.List.foreach(List.scala:392)
    at org.apache.spark.sql.catalyst.trees.TreeNode.foreachUp(TreeNode.scala:182)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$foreachUp$1(TreeNode.scala:182)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$foreachUp$1$adapted(TreeNode.scala:182)
    at scala.collection.immutable.List.foreach(List.scala:392)
    at org.apache.spark.sql.catalyst.trees.TreeNode.foreachUp(TreeNode.scala:182)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$foreachUp$1(TreeNode.scala:182)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$foreachUp$1$adapted(TreeNode.scala:182)
    at scala.collection.immutable.List.foreach(List.scala:392)
    at org.apache.spark.sql.catalyst.trees.TreeNode.foreachUp(TreeNode.scala:182)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$foreachUp$1(TreeNode.scala:182)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$foreachUp$1$adapted(TreeNode.scala:182)
    at scala.collection.immutable.List.foreach(List.scala:392)
    at org.apache.spark.sql.catalyst.trees.TreeNode.foreachUp(TreeNode.scala:182)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$foreachUp$1(TreeNode.scala:182)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$foreachUp$1$adapted(TreeNode.scala:182)
    at scala.collection.immutable.List.foreach(List.scala:392)
    at org.apache.spark.sql.catalyst.trees.TreeNode.foreachUp(TreeNode.scala:182)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$foreachUp$1(TreeNode.scala:182)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$foreachUp$1$adapted(TreeNode.scala:182)
    at scala.collection.immutable.List.foreach(List.scala:392)
    at org.apache.spark.sql.catalyst.trees.TreeNode.foreachUp(TreeNode.scala:182)
    at org.apache.spark.sql.catalyst.analysis.CheckAnalysis.checkAnalysis(CheckAnalysis.scala:93)
    at org.apache.spark.sql.catalyst.analysis.CheckAnalysis.checkAnalysis$(CheckAnalysis.scala:90)
    at org.apache.spark.sql.catalyst.analysis.Analyzer.checkAnalysis(Analyzer.scala:155)
    at org.apache.spark.sql.catalyst.analysis.Analyzer.$anonfun$executeAndCheck$1(Analyzer.scala:176)
    at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.markInAnalyzer(AnalysisHelper.scala:228)
    at org.apache.spark.sql.catalyst.analysis.Analyzer.executeAndCheck(Analyzer.scala:173)
    at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:73)
    at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
    at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:143)
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:772)
    at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:143)
    at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:73)
    at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:71)
    at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:63)
    at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:98)
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:772)
    at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:96)
    at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:615)
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:772)
    at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:610)
    at org.apache.kyuubi.engine.spark.operation.ExecuteStatement.$anonfun$executeStatement$1(ExecuteStatement.scala:86)
    at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
    at org.apache.kyuubi.engine.spark.operation.SparkOperation.$anonfun$withLocalProperties$1(SparkOperation.scala:147)
    at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:163)
    at org.apache.kyuubi.engine.spark.operation.SparkOperation.withLocalProperties(SparkOperation.scala:131)
    at org.apache.kyuubi.engine.spark.operation.ExecuteStatement.executeStatement(ExecuteStatement.scala:81)
    at org.apache.kyuubi.engine.spark.operation.ExecuteStatement$$anon$1.run(ExecuteStatement.scala:103)
    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
    at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
    at org.apache.kyuubi.KyuubiSQLException$.apply(KyuubiSQLException.scala:70)
    at org.apache.kyuubi.engine.spark.operation.SparkOperation$$anonfun$onError$1.$anonfun$applyOrElse$1(SparkOperation.scala:181)
    at org.apache.kyuubi.Utils$.withLockRequired(Utils.scala:425)
    at org.apache.kyuubi.operation.AbstractOperation.withLockRequired(AbstractOperation.scala:52)
    at org.apache.kyuubi.engine.spark.operation.SparkOperation$$anonfun$onError$1.applyOrElse(SparkOperation.scala:169)
    at org.apache.kyuubi.engine.spark.operation.SparkOperation$$anonfun$onError$1.applyOrElse(SparkOperation.scala:164)
    at scala.runtime.AbstractPartialFunction.apply(AbstractPartialFunction.scala:38)
    at org.apache.kyuubi.engine.spark.operation.ExecuteStatement.executeStatement(ExecuteStatement.scala:92)
    at org.apache.kyuubi.engine.spark.operation.ExecuteStatement$$anon$1.run(ExecuteStatement.scala:103)
    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
    at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.spark.sql.AnalysisException: grouping expressions sequence is empty, and 't1.`re_no`' is not an aggregate function. Wrap '(CASE WHEN (count(DISTINCT t1.`vin_17`) < count(t1.`vin_17`)) THEN '是' ELSE '否' END AS `is_vin_17_duplicate`)' in windowing function(s) or wrap 't1.`re_no`' in first() (or first_value) if you don't care which value you get.;
    ... 6 more
```
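The decisive part is the first line of the message. Once any aggregate function appears in the SELECT list (here `COUNT(DISTINCT vin_17)` and `COUNT(vin_17)`), Spark's analyzer requires every other selected expression to be aggregated or to appear in GROUP BY; since the query has no GROUP BY at all, the "grouping expressions sequence" is empty and the plain column `t1.re_no` trips the check. A minimal sketch that reproduces the same class of error (the `demo` view is hypothetical, just enough rows to exercise the rule):

```sql
-- Hypothetical two-row view, only to exercise the analyzer rule.
create or replace temporary view demo as
select * from values (1, 'VINA'), (2, 'VINA') as demo(id, vin);

-- Fails the same way: 'id' is not an aggregate function and there is no GROUP BY.
select id, count(distinct vin) < count(vin) as has_dup from demo;

-- Passes: the non-aggregated column is listed in GROUP BY.
select id, count(distinct vin) < count(vin) as has_dup from demo group by id;
```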
The SQL statement that was executed:
```sql
create table orca01_dr_data.ads_rpt_mini_loyalty_customer_t_1124 stored as parquet as
with repurchase as (
    select
        t1.re_no,
        concat(t2.last_name, t2.first_name) as customer_full_name,
        t1.new_vin_17,
        t1.brand_name,
        t1.re_status,
        t1.create_date,
        CASE WHEN COUNT(DISTINCT vin_17) < COUNT(vin_17) THEN '是' ELSE '否' END AS is_vin_17_duplicate,
        CASE WHEN t1.category = '公司购车' THEN t1.company_name ELSE NULL END AS company_name_result
    from dwc.dwc_dim_com_membership2_bz_repurchase_full_t t1
    left join dwc.dwc_dim_cus_membership2_customer_full_t t2
        on t1.cop_id = t2.cop_id
    where t1.brand_name = 'MINI'
      and t1.create_date >= '2025-10-01'
),
ordercenter as (
    select
        paid_amount,
        vin.status as order_status,
        vin.vin_17 as vin_17,
        concat(t4.last_name, t4.first_name) as concat_name,
        t4.name as customer_full_name
    from dwc.dwc_fact_sal_ordercenter_core_order_full_t t1
    left join dwc.dwc_fact_sal_ordercenter_payment_full_t a1
        on t1.order_no = a1.order_no
    left join dwc.dwc_fact_sal_ordercenter_vehicle_fulfillment_full_t vin
        on t1.order_no = vin.order_no
    left join dwc.dwc_fact_sal_ordercenter_customer_full_t t4
        on t1.order_no = t4.order_no
        and t1.cid = t4.cid
        and t4.type = 'VEHICLE_OWNER'
        and t4.deleted != 0
    left join (
        select
            order_no,
            create_date,
            status,
            row_number() over (partition by order_no order by create_date asc nulls last) as rk
        from dwc.dwc_fact_com_ordercenter_core_order_log_full_t
        where type = 'PAYMENT'
    ) log1
        on t1.order_no = log1.order_no
        and log1.rk = 1
    where t1.business_type = 'NC'
      and t1.deleted != 0
)
select distinct
    t1.re_no,
    t1.customer_full_name,
    t1.new_vin_17,
    t1.brand_name,
    t1.re_status,
    t1.create_date,
    is_vin_17_duplicate,
    company_name_result,
    paid_amount,
    order_status
from repurchase t1
left join ordercenter t2
    on t1.new_vin_17 = t2.vin_17
```
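Judging from the plan, the offending scope is the `repurchase` CTE: it selects six plain columns plus two `COUNT` aggregates and has no GROUP BY. The error message offers two ways out: wrap the plain columns in `first()`/`first_value()`, or move the COUNT comparison into a window computation. Below is a sketch of the window route, with one caveat and one assumption: Spark rejects DISTINCT aggregates inside a window (so `COUNT(DISTINCT vin_17) OVER (...)` would also fail), and the sketch assumes the flag is meant per row, marking rows whose `vin_17` occurs more than once; the original global COUNT comparison has no per-row meaning. If a one-row-per-group rollup was the intent instead, the other route is a GROUP BY listing every non-aggregated column.

```sql
-- Hedged rewrite of the repurchase CTE only; the rest of the statement is unchanged.
-- Assumption: a row counts as a duplicate when its vin_17 value appears more than
-- once among the joined rows, so a per-vin window count replaces the global COUNTs.
with repurchase as (
    select
        t1.re_no,
        concat(t2.last_name, t2.first_name) as customer_full_name,
        t1.new_vin_17,
        t1.brand_name,
        t1.re_status,
        t1.create_date,
        case when count(t1.vin_17) over (partition by t1.vin_17) > 1
             then '是' else '否'                 -- '是' = yes, '否' = no
        end as is_vin_17_duplicate,
        case when t1.category = '公司购车'       -- '公司购车' = company purchase
             then t1.company_name
        end as company_name_result
    from dwc.dwc_dim_com_membership2_bz_repurchase_full_t t1
    left join dwc.dwc_dim_cus_membership2_customer_full_t t2
        on t1.cop_id = t2.cop_id
    where t1.brand_name = 'MINI'
      and t1.create_date >= '2025-10-01'
)
select * from repurchase  -- plug this CTE back into the original CREATE TABLE ... AS
```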