@@ -713,6 +713,66 @@ class JDBCV2Suite extends QueryTest with SharedSparkSession with ExplainSuiteHel
713
713
checkAnswer(df, Seq (Row (1 , 2 ), Row (2 , 2 ), Row (6 , 1 )))
714
714
}
715
715
716
// Verifies that VAR_POP/VAR_SAMP aggregates, the `dept > 0` filter, and the
// GROUP BY column are all pushed down to the JDBC (H2) data source, and that
// the pushed-down query still returns the expected per-department variances.
// `DePt` is deliberately mixed-case to exercise case-insensitive column resolution.
test("scan with aggregate push-down: VAR_POP VAR_SAMP with filter and group by") {
  val df = sql("select VAR_POP(bonus), VAR_SAMP(bonus) FROM h2.test.employee where dept > 0" +
    " group by DePt")
  // Both the filter and the aggregate should be removed from the Spark plan
  // because the data source handles them.
  checkFiltersRemoved(df)
  checkAggregateRemoved(df)
  df.queryExecution.optimizedPlan.collect {
    case _: DataSourceV2ScanRelation =>
      val expected_plan_fragment =
        "PushedAggregates: [VAR_POP(BONUS), VAR_SAMP(BONUS)], " +
          "PushedFilters: [IsNotNull(DEPT), GreaterThan(DEPT,0)], " +
          "PushedGroupByColumns: [DEPT]"
      checkKeywordsExistsInExplain(df, expected_plan_fragment)
  }
  // Single-row group yields VAR_SAMP = null (sample variance undefined for n = 1).
  checkAnswer(df, Seq(Row(10000d, 20000d), Row(2500d, 5000d), Row(0d, null)))
}
731
+
732
// Verifies that STDDEV_POP/STDDEV_SAMP aggregates, the `dept > 0` filter, and
// the GROUP BY column are all pushed down to the JDBC (H2) data source.
test("scan with aggregate push-down: STDDEV_POP STDDEV_SAMP with filter and group by") {
  val df = sql("select STDDEV_POP(bonus), STDDEV_SAMP(bonus) FROM h2.test.employee" +
    " where dept > 0 group by DePt")
  // Filter and aggregate are both handled by the source, so Spark's plan
  // should contain neither.
  checkFiltersRemoved(df)
  checkAggregateRemoved(df)
  df.queryExecution.optimizedPlan.collect {
    case _: DataSourceV2ScanRelation =>
      val expected_plan_fragment =
        "PushedAggregates: [STDDEV_POP(BONUS), STDDEV_SAMP(BONUS)], " +
          "PushedFilters: [IsNotNull(DEPT), GreaterThan(DEPT,0)], " +
          "PushedGroupByColumns: [DEPT]"
      checkKeywordsExistsInExplain(df, expected_plan_fragment)
  }
  // Single-row group yields STDDEV_SAMP = null (sample stddev undefined for n = 1).
  checkAnswer(df, Seq(Row(100d, 141.4213562373095d), Row(50d, 70.71067811865476d), Row(0d, null)))
}
747
+
748
// COVAR_POP/COVAR_SAMP are NOT supported for push-down, so only the filter is
// expected to reach the data source; the aggregate must stay in Spark's plan
// (hence checkAggregateRemoved(df, false)) and still produce correct results.
test("scan with aggregate push-down: COVAR_POP COVAR_SAMP with filter and group by") {
  val df = sql("select COVAR_POP(bonus, bonus), COVAR_SAMP(bonus, bonus)" +
    " FROM h2.test.employee where dept > 0 group by DePt")
  checkFiltersRemoved(df)
  // false: the aggregate is expected to remain in the Spark plan.
  checkAggregateRemoved(df, false)
  df.queryExecution.optimizedPlan.collect {
    case _: DataSourceV2ScanRelation =>
      // No PushedAggregates fragment expected — only the filter is pushed.
      val expected_plan_fragment =
        "PushedFilters: [IsNotNull(DEPT), GreaterThan(DEPT,0)]"
      checkKeywordsExistsInExplain(df, expected_plan_fragment)
  }
  // COVAR(x, x) equals VAR(x); single-row group yields COVAR_SAMP = null.
  checkAnswer(df, Seq(Row(10000d, 20000d), Row(2500d, 5000d), Row(0d, null)))
}
761
+
762
// CORR is NOT supported for push-down, so only the filter is expected to reach
// the data source; the aggregate stays in Spark's plan and is evaluated there.
test("scan with aggregate push-down: CORR with filter and group by") {
  val df = sql("select CORR(bonus, bonus) FROM h2.test.employee where dept > 0" +
    " group by DePt")
  checkFiltersRemoved(df)
  // false: the aggregate is expected to remain in the Spark plan.
  checkAggregateRemoved(df, false)
  df.queryExecution.optimizedPlan.collect {
    case _: DataSourceV2ScanRelation =>
      // No PushedAggregates fragment expected — only the filter is pushed.
      val expected_plan_fragment =
        "PushedFilters: [IsNotNull(DEPT), GreaterThan(DEPT,0)]"
      checkKeywordsExistsInExplain(df, expected_plan_fragment)
  }
  // CORR(x, x) = 1 when variance is non-zero; null for the single-row group
  // (correlation undefined when the sample variance is zero/undefined).
  checkAnswer(df, Seq(Row(1d), Row(1d), Row(null)))
}
775
+
716
776
test(" scan with aggregate push-down: aggregate over alias NOT push down" ) {
717
777
val cols = Seq (" a" , " b" , " c" , " d" )
718
778
val df1 = sql(" select * from h2.test.employee" ).toDF(cols : _* )
0 commit comments