
SPARK-33152. fix test failures after merge
ashahid committed Feb 28, 2025
1 parent 4ea624f commit dd02261
Showing 10 changed files with 18 additions and 18 deletions.
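
The hunks below only reorder the IsNotNull entries in the PushedFilters and Condition lines of the detailed plans, and in the Filter lines of the simplified plans, so that the checked-in golden text matches the predicate order the planner produces after the merge; the queries themselves are unchanged. Golden plan files like these (TPC-DS plan-stability outputs, judging by the store and item tables involved) are normally regenerated rather than edited by hand, roughly via SPARK_GENERATE_GOLDEN_FILES=1 build/sbt "sql/testOnly *PlanStability*Suite" in Spark's usual workflow (stated as an assumption; the command is not part of this commit).

As a minimal, hedged sketch of where this plan text comes from (not taken from the commit; the warehouse path is hypothetical), the Filter condition and the parquet PushedFilters printed by explain are simply the null checks on the scanned columns, so a different predicate order changes the golden text even though the filter is semantically the same:

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.col

object PlanTextSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[1]").appName("plan-text-sketch").getOrCreate()
    // Hypothetical local copy of the store table that appears in the plans below.
    val store = spark.read.parquet("/tmp/warehouse/store")
    val filtered = store.filter(
      col("s_store_sk").isNotNull &&
        col("s_store_name").isNotNull &&
        col("s_company_name").isNotNull)
    // "formatted" mode prints the Filter condition and the PushedFilters of the
    // parquet scan in plan order, matching the text captured in approved plan files.
    filtered.explain("formatted")
    spark.stop()
  }
}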

@@ -85,15 +85,15 @@ Input [7]: [ss_item_sk#1, ss_store_sk#2, ss_sales_price#3, ss_sold_date_sk#4, d_
Output [3]: [s_store_sk#9, s_store_name#10, s_company_name#11]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store]
-PushedFilters: [IsNotNull(s_store_sk), IsNotNull(s_company_name), IsNotNull(s_store_name)]
+PushedFilters: [IsNotNull(s_store_sk), IsNotNull(s_store_name), IsNotNull(s_company_name)]
ReadSchema: struct<s_store_sk:int,s_store_name:string,s_company_name:string>

(8) ColumnarToRow [codegen id : 2]
Input [3]: [s_store_sk#9, s_store_name#10, s_company_name#11]

(9) Filter [codegen id : 2]
Input [3]: [s_store_sk#9, s_store_name#10, s_company_name#11]
-Condition : ((isnotnull(s_store_sk#9) AND isnotnull(s_company_name#11)) AND isnotnull(s_store_name#10))
+Condition : ((isnotnull(s_store_sk#9) AND isnotnull(s_store_name#10)) AND isnotnull(s_company_name#11))

(10) BroadcastExchange
Input [3]: [s_store_sk#9, s_store_name#10, s_company_name#11]

@@ -58,7 +58,7 @@ TakeOrderedAndProject [sum_sales,avg_monthly_sales,s_store_name,i_category,i_bra
InputAdapter
BroadcastExchange #6
WholeStageCodegen (2)
-Filter [s_store_sk,s_company_name,s_store_name]
+Filter [s_store_sk,s_store_name,s_company_name]
ColumnarToRow
InputAdapter
Scan parquet spark_catalog.default.store [s_store_sk,s_store_name,s_company_name]

@@ -106,15 +106,15 @@ Input [8]: [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, ss_sold_da
Output [3]: [s_store_sk#12, s_store_name#13, s_company_name#14]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store]
-PushedFilters: [IsNotNull(s_store_sk), IsNotNull(s_company_name), IsNotNull(s_store_name)]
+PushedFilters: [IsNotNull(s_store_sk), IsNotNull(s_store_name), IsNotNull(s_company_name)]
ReadSchema: struct<s_store_sk:int,s_store_name:string,s_company_name:string>

(14) ColumnarToRow [codegen id : 3]
Input [3]: [s_store_sk#12, s_store_name#13, s_company_name#14]

(15) Filter [codegen id : 3]
Input [3]: [s_store_sk#12, s_store_name#13, s_company_name#14]
-Condition : ((isnotnull(s_store_sk#12) AND isnotnull(s_company_name#14)) AND isnotnull(s_store_name#13))
+Condition : ((isnotnull(s_store_sk#12) AND isnotnull(s_store_name#13)) AND isnotnull(s_company_name#14))

(16) BroadcastExchange
Input [3]: [s_store_sk#12, s_store_name#13, s_company_name#14]

@@ -51,7 +51,7 @@ TakeOrderedAndProject [sum_sales,avg_monthly_sales,s_store_name,i_category,i_bra
InputAdapter
BroadcastExchange #5
WholeStageCodegen (3)
-Filter [s_store_sk,s_company_name,s_store_name]
+Filter [s_store_sk,s_store_name,s_company_name]
ColumnarToRow
InputAdapter
Scan parquet spark_catalog.default.store [s_store_sk,s_store_name,s_company_name]

@@ -121,15 +121,15 @@ Arguments: [ss_item_sk#1 ASC NULLS FIRST], false, 0
Output [3]: [i_item_sk#12, i_brand#13, i_category#14]
Batched: true
Location [not included in comparison]/{warehouse_dir}/item]
-PushedFilters: [IsNotNull(i_item_sk), IsNotNull(i_brand), IsNotNull(i_category)]
+PushedFilters: [IsNotNull(i_item_sk), IsNotNull(i_category), IsNotNull(i_brand)]
ReadSchema: struct<i_item_sk:int,i_brand:string,i_category:string>

(16) ColumnarToRow [codegen id : 5]
Input [3]: [i_item_sk#12, i_brand#13, i_category#14]

(17) Filter [codegen id : 5]
Input [3]: [i_item_sk#12, i_brand#13, i_category#14]
-Condition : ((isnotnull(i_item_sk#12) AND isnotnull(i_brand#13)) AND isnotnull(i_category#14))
+Condition : ((isnotnull(i_item_sk#12) AND isnotnull(i_category#14)) AND isnotnull(i_brand#13))

(18) Exchange
Input [3]: [i_item_sk#12, i_brand#13, i_category#14]

@@ -68,7 +68,7 @@ TakeOrderedAndProject [sum_sales,avg_monthly_sales,d_moy,i_category,d_year,psum,
InputAdapter
Exchange [i_item_sk] #7
WholeStageCodegen (5)
-Filter [i_item_sk,i_brand,i_category]
+Filter [i_item_sk,i_category,i_brand]
ColumnarToRow
InputAdapter
Scan parquet spark_catalog.default.item [i_item_sk,i_brand,i_category]

@@ -50,15 +50,15 @@ TakeOrderedAndProject (45)
Output [3]: [i_item_sk#1, i_brand#2, i_category#3]
Batched: true
Location [not included in comparison]/{warehouse_dir}/item]
-PushedFilters: [IsNotNull(i_item_sk), IsNotNull(i_category), IsNotNull(i_brand)]
+PushedFilters: [IsNotNull(i_item_sk), IsNotNull(i_brand), IsNotNull(i_category)]
ReadSchema: struct<i_item_sk:int,i_brand:string,i_category:string>

(2) ColumnarToRow [codegen id : 4]
Input [3]: [i_item_sk#1, i_brand#2, i_category#3]

(3) Filter [codegen id : 4]
Input [3]: [i_item_sk#1, i_brand#2, i_category#3]
-Condition : ((isnotnull(i_item_sk#1) AND isnotnull(i_category#3)) AND isnotnull(i_brand#2))
+Condition : ((isnotnull(i_item_sk#1) AND isnotnull(i_brand#2)) AND isnotnull(i_category#3))

(4) Scan parquet spark_catalog.default.store_sales
Output [4]: [ss_item_sk#4, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7]

@@ -106,15 +106,15 @@ Input [8]: [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, ss_sold_da
Output [3]: [s_store_sk#12, s_store_name#13, s_company_name#14]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store]
-PushedFilters: [IsNotNull(s_store_sk), IsNotNull(s_store_name), IsNotNull(s_company_name)]
+PushedFilters: [IsNotNull(s_store_sk), IsNotNull(s_company_name), IsNotNull(s_store_name)]
ReadSchema: struct<s_store_sk:int,s_store_name:string,s_company_name:string>

(14) ColumnarToRow [codegen id : 3]
Input [3]: [s_store_sk#12, s_store_name#13, s_company_name#14]

(15) Filter [codegen id : 3]
Input [3]: [s_store_sk#12, s_store_name#13, s_company_name#14]
-Condition : ((isnotnull(s_store_sk#12) AND isnotnull(s_store_name#13)) AND isnotnull(s_company_name#14))
+Condition : ((isnotnull(s_store_sk#12) AND isnotnull(s_company_name#14)) AND isnotnull(s_store_name#13))

(16) BroadcastExchange
Input [3]: [s_store_sk#12, s_store_name#13, s_company_name#14]

@@ -28,7 +28,7 @@ TakeOrderedAndProject [sum_sales,avg_monthly_sales,d_moy,i_category,d_year,psum,
BroadcastHashJoin [ss_sold_date_sk,d_date_sk]
Project [i_brand,i_category,ss_store_sk,ss_sales_price,ss_sold_date_sk]
BroadcastHashJoin [i_item_sk,ss_item_sk]
-Filter [i_item_sk,i_category,i_brand]
+Filter [i_item_sk,i_brand,i_category]
ColumnarToRow
InputAdapter
Scan parquet spark_catalog.default.item [i_item_sk,i_brand,i_category]

@@ -51,7 +51,7 @@ TakeOrderedAndProject [sum_sales,avg_monthly_sales,d_moy,i_category,d_year,psum,
InputAdapter
BroadcastExchange #5
WholeStageCodegen (3)
-Filter [s_store_sk,s_store_name,s_company_name]
+Filter [s_store_sk,s_company_name,s_store_name]
ColumnarToRow
InputAdapter
Scan parquet spark_catalog.default.store [s_store_sk,s_store_name,s_company_name]

@@ -149,15 +149,15 @@ Condition : isnotnull(cs_item_sk#1)
Output [6]: [i_item_sk#7, i_brand_id#8, i_class_id#9, i_category_id#10, i_category#11, i_manufact_id#12]
Batched: true
Location [not included in comparison]/{warehouse_dir}/item]
-PushedFilters: [IsNotNull(i_category), EqualTo(i_category,Books ), IsNotNull(i_item_sk), IsNotNull(i_category_id), IsNotNull(i_brand_id), IsNotNull(i_class_id), IsNotNull(i_manufact_id)]
+PushedFilters: [IsNotNull(i_category), EqualTo(i_category,Books ), IsNotNull(i_item_sk), IsNotNull(i_class_id), IsNotNull(i_category_id), IsNotNull(i_brand_id), IsNotNull(i_manufact_id)]
ReadSchema: struct<i_item_sk:int,i_brand_id:int,i_class_id:int,i_category_id:int,i_category:string,i_manufact_id:int>

(5) ColumnarToRow [codegen id : 1]
Input [6]: [i_item_sk#7, i_brand_id#8, i_class_id#9, i_category_id#10, i_category#11, i_manufact_id#12]

(6) Filter [codegen id : 1]
Input [6]: [i_item_sk#7, i_brand_id#8, i_class_id#9, i_category_id#10, i_category#11, i_manufact_id#12]
-Condition : ((((((isnotnull(i_category#11) AND (i_category#11 = Books )) AND isnotnull(i_item_sk#7)) AND isnotnull(i_category_id#10)) AND isnotnull(i_brand_id#8)) AND isnotnull(i_class_id#9)) AND isnotnull(i_manufact_id#12))
+Condition : ((((((isnotnull(i_category#11) AND (i_category#11 = Books )) AND isnotnull(i_item_sk#7)) AND isnotnull(i_class_id#9)) AND isnotnull(i_category_id#10)) AND isnotnull(i_brand_id#8)) AND isnotnull(i_manufact_id#12))

(7) Project [codegen id : 1]
Output [5]: [i_item_sk#7, i_brand_id#8, i_class_id#9, i_category_id#10, i_manufact_id#12]

@@ -49,7 +49,7 @@ TakeOrderedAndProject [sales_cnt_diff,sales_amt_diff,prev_year,year,i_brand_id,i
BroadcastExchange #6
WholeStageCodegen (1)
Project [i_item_sk,i_brand_id,i_class_id,i_category_id,i_manufact_id]
-Filter [i_category,i_item_sk,i_category_id,i_brand_id,i_class_id,i_manufact_id]
+Filter [i_category,i_item_sk,i_class_id,i_category_id,i_brand_id,i_manufact_id]
ColumnarToRow
InputAdapter
Scan parquet spark_catalog.default.item [i_item_sk,i_brand_id,i_class_id,i_category_id,i_category,i_manufact_id]
