Update PlanStabilitySuite
wangyum committed Aug 24, 2020
1 parent 4901af4 commit c436bc4
Showing 100 changed files with 992 additions and 902 deletions.
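Every hunk below shows the same pattern: a join-key column that previously carried no constraint now has an inferred IsNotNull, visible both in the scan's PushedFilters and in the post-scan Filter's Condition. The following is a minimal, self-contained sketch of how such a plan arises; the paths, toy rows, and object name are illustrative assumptions, not part of this suite.

import org.apache.spark.sql.SparkSession

object InferredNotNullDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("InferredNotNullDemo")
      .getOrCreate()
    import spark.implicits._

    // Hypothetical stand-ins for the suite's TPC-DS customer/store_sales tables.
    Seq((1, 10, 100)).toDF("c_customer_sk", "c_current_cdemo_sk", "c_current_addr_sk")
      .write.mode("overwrite").parquet("/tmp/demo/customer")
    Seq((2451000, 1)).toDF("ss_sold_date_sk", "ss_customer_sk")
      .write.mode("overwrite").parquet("/tmp/demo/store_sales")

    val customer   = spark.read.parquet("/tmp/demo/customer")
    val storeSales = spark.read.parquet("/tmp/demo/store_sales")

    // An inner equi-join implies both keys are non-null, so the optimizer
    // inserts isnotnull(...) filters, and the Parquet source reports them
    // in the plan as, e.g., PushedFilters: [..., IsNotNull(c_customer_sk)].
    customer
      .join(storeSales, customer("c_customer_sk") === storeSales("ss_customer_sk"))
      .explain("formatted")

    spark.stop()
  }
}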
@@ -63,15 +63,15 @@ TakeOrderedAndProject (58)
Output [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5]
Batched: true
Location: InMemoryFileIndex [file:/Users/yi.wu/IdeaProjects/spark/sql/core/spark-warehouse/org.apache.spark.sql.TPCDSV1_4_PlanStabilityWithStatsSuite/customer]
-PushedFilters: [IsNotNull(c_current_addr_sk), IsNotNull(c_current_cdemo_sk)]
+PushedFilters: [IsNotNull(c_current_addr_sk), IsNotNull(c_current_cdemo_sk), IsNotNull(c_customer_sk)]
ReadSchema: struct<c_customer_sk:int,c_current_cdemo_sk:int,c_current_addr_sk:int>

(2) ColumnarToRow [codegen id : 1]
Input [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5]

(3) Filter [codegen id : 1]
Input [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5]
-Condition : (isnotnull(c_current_addr_sk#5) AND isnotnull(c_current_cdemo_sk#4))
+Condition : ((isnotnull(c_current_addr_sk#5) AND isnotnull(c_current_cdemo_sk#4)) AND isnotnull(c_customer_sk#3))

(4) Exchange
Input [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5]
@@ -85,15 +85,15 @@ Arguments: [c_customer_sk#3 ASC NULLS FIRST], false, 0
Output [2]: [ss_sold_date_sk#7, ss_customer_sk#8]
Batched: true
Location: InMemoryFileIndex [file:/Users/yi.wu/IdeaProjects/spark/sql/core/spark-warehouse/org.apache.spark.sql.TPCDSV1_4_PlanStabilityWithStatsSuite/store_sales]
-PushedFilters: [IsNotNull(ss_sold_date_sk)]
+PushedFilters: [IsNotNull(ss_sold_date_sk), IsNotNull(ss_customer_sk)]
ReadSchema: struct<ss_sold_date_sk:int,ss_customer_sk:int>

(7) ColumnarToRow [codegen id : 4]
Input [2]: [ss_sold_date_sk#7, ss_customer_sk#8]

(8) Filter [codegen id : 4]
Input [2]: [ss_sold_date_sk#7, ss_customer_sk#8]
-Condition : isnotnull(ss_sold_date_sk#7)
+Condition : (isnotnull(ss_sold_date_sk#7) AND isnotnull(ss_customer_sk#8))

(9) Scan parquet default.date_dim
Output [3]: [d_date_sk#9, d_year#10, d_moy#11]
@@ -26,7 +26,7 @@ TakeOrderedAndProject [cd_credit_rating,cd_dep_college_count,cd_dep_count,cd_dep
InputAdapter
Exchange [c_customer_sk] #3
WholeStageCodegen (1)
-Filter [c_current_addr_sk,c_current_cdemo_sk]
+Filter [c_current_addr_sk,c_current_cdemo_sk,c_customer_sk]
ColumnarToRow
InputAdapter
Scan parquet default.customer [c_current_addr_sk,c_current_cdemo_sk,c_customer_sk]
@@ -37,7 +37,7 @@ TakeOrderedAndProject [cd_credit_rating,cd_dep_college_count,cd_dep_count,cd_dep
WholeStageCodegen (4)
Project [ss_customer_sk]
BroadcastHashJoin [d_date_sk,ss_sold_date_sk]
-Filter [ss_sold_date_sk]
+Filter [ss_customer_sk,ss_sold_date_sk]
ColumnarToRow
InputAdapter
Scan parquet default.store_sales [ss_customer_sk,ss_sold_date_sk]
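One thing these plans make visible: each inferred IsNotNull appears twice, once in the scan's PushedFilters and again in the post-scan Filter's Condition. Pushed Parquet filters are best-effort (they mainly skip row groups via statistics), so Spark keeps the Filter node to re-evaluate the predicate on the rows that are actually read.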
@@ -55,29 +55,29 @@ TakeOrderedAndProject (50)
Output [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5]
Batched: true
Location: InMemoryFileIndex [file:/Users/yi.wu/IdeaProjects/spark/sql/core/spark-warehouse/org.apache.spark.sql.TPCDSV1_4_PlanStabilitySuite/customer]
-PushedFilters: [IsNotNull(c_current_addr_sk), IsNotNull(c_current_cdemo_sk)]
+PushedFilters: [IsNotNull(c_current_addr_sk), IsNotNull(c_current_cdemo_sk), IsNotNull(c_customer_sk)]
ReadSchema: struct<c_customer_sk:int,c_current_cdemo_sk:int,c_current_addr_sk:int>

(2) ColumnarToRow [codegen id : 9]
Input [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5]

(3) Filter [codegen id : 9]
Input [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5]
-Condition : (isnotnull(c_current_addr_sk#5) AND isnotnull(c_current_cdemo_sk#4))
+Condition : ((isnotnull(c_current_addr_sk#5) AND isnotnull(c_current_cdemo_sk#4)) AND isnotnull(c_customer_sk#3))

(4) Scan parquet default.store_sales
Output [2]: [ss_sold_date_sk#6, ss_customer_sk#7]
Batched: true
Location: InMemoryFileIndex [file:/Users/yi.wu/IdeaProjects/spark/sql/core/spark-warehouse/org.apache.spark.sql.TPCDSV1_4_PlanStabilitySuite/store_sales]
-PushedFilters: [IsNotNull(ss_sold_date_sk)]
+PushedFilters: [IsNotNull(ss_sold_date_sk), IsNotNull(ss_customer_sk)]
ReadSchema: struct<ss_sold_date_sk:int,ss_customer_sk:int>

(5) ColumnarToRow [codegen id : 2]
Input [2]: [ss_sold_date_sk#6, ss_customer_sk#7]

(6) Filter [codegen id : 2]
Input [2]: [ss_sold_date_sk#6, ss_customer_sk#7]
-Condition : isnotnull(ss_sold_date_sk#6)
+Condition : (isnotnull(ss_sold_date_sk#6) AND isnotnull(ss_customer_sk#7))

(7) Scan parquet default.date_dim
Output [3]: [d_date_sk#8, d_year#9, d_moy#10]
@@ -14,7 +14,7 @@ TakeOrderedAndProject [cd_credit_rating,cd_dep_college_count,cd_dep_count,cd_dep
BroadcastHashJoin [c_customer_sk,cs_ship_customer_sk]
BroadcastHashJoin [c_customer_sk,ws_bill_customer_sk]
BroadcastHashJoin [c_customer_sk,ss_customer_sk]
-Filter [c_current_addr_sk,c_current_cdemo_sk]
+Filter [c_current_addr_sk,c_current_cdemo_sk,c_customer_sk]
ColumnarToRow
InputAdapter
Scan parquet default.customer [c_current_addr_sk,c_current_cdemo_sk,c_customer_sk]
@@ -23,7 +23,7 @@ TakeOrderedAndProject [cd_credit_rating,cd_dep_college_count,cd_dep_count,cd_dep
WholeStageCodegen (2)
Project [ss_customer_sk]
BroadcastHashJoin [d_date_sk,ss_sold_date_sk]
-Filter [ss_sold_date_sk]
+Filter [ss_customer_sk,ss_sold_date_sk]
ColumnarToRow
InputAdapter
Scan parquet default.store_sales [ss_customer_sk,ss_sold_date_sk]
@@ -158,15 +158,15 @@ Arguments: [ss_item_sk#2 ASC NULLS FIRST], false, 0
Output [4]: [i_item_sk#6, i_brand_id#7, i_class_id#8, i_category_id#9]
Batched: true
Location: InMemoryFileIndex [file:/Users/yi.wu/IdeaProjects/spark/sql/core/spark-warehouse/org.apache.spark.sql.TPCDSV1_4_PlanStabilityWithStatsSuite/item]
-PushedFilters: [IsNotNull(i_class_id), IsNotNull(i_brand_id), IsNotNull(i_category_id)]
+PushedFilters: [IsNotNull(i_class_id), IsNotNull(i_brand_id), IsNotNull(i_category_id), IsNotNull(i_item_sk)]
ReadSchema: struct<i_item_sk:int,i_brand_id:int,i_class_id:int,i_category_id:int>

(7) ColumnarToRow [codegen id : 17]
Input [4]: [i_item_sk#6, i_brand_id#7, i_class_id#8, i_category_id#9]

(8) Filter [codegen id : 17]
Input [4]: [i_item_sk#6, i_brand_id#7, i_class_id#8, i_category_id#9]
-Condition : ((isnotnull(i_class_id#8) AND isnotnull(i_brand_id#7)) AND isnotnull(i_category_id#9))
+Condition : (((isnotnull(i_class_id#8) AND isnotnull(i_brand_id#7)) AND isnotnull(i_category_id#9)) AND isnotnull(i_item_sk#6))

(9) Scan parquet default.store_sales
Output [2]: [ss_sold_date_sk#1, ss_item_sk#2]
@@ -80,7 +80,7 @@ TakeOrderedAndProject [channel,i_brand_id,i_category_id,i_class_id,sum(number_sa
WholeStageCodegen (17)
Project [i_item_sk]
BroadcastHashJoin [brand_id,category_id,class_id,i_brand_id,i_category_id,i_class_id]
-Filter [i_brand_id,i_category_id,i_class_id]
+Filter [i_brand_id,i_category_id,i_class_id,i_item_sk]
ColumnarToRow
InputAdapter
Scan parquet default.item [i_brand_id,i_category_id,i_class_id,i_item_sk]
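The q14 plans show the same inference on a scan that already had pushed filters: the item table's existing not-null checks on i_class_id, i_brand_id, and i_category_id simply gain a fourth conjunct, IsNotNull(i_item_sk), from the join key.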
@@ -134,15 +134,15 @@ Condition : (isnotnull(ss_item_sk#2) AND isnotnull(ss_sold_date_sk#1))
Output [4]: [i_item_sk#5, i_brand_id#6, i_class_id#7, i_category_id#8]
Batched: true
Location: InMemoryFileIndex [file:/Users/yi.wu/IdeaProjects/spark/sql/core/spark-warehouse/org.apache.spark.sql.TPCDSV1_4_PlanStabilitySuite/item]
-PushedFilters: [IsNotNull(i_class_id), IsNotNull(i_brand_id), IsNotNull(i_category_id)]
+PushedFilters: [IsNotNull(i_class_id), IsNotNull(i_brand_id), IsNotNull(i_category_id), IsNotNull(i_item_sk)]
ReadSchema: struct<i_item_sk:int,i_brand_id:int,i_class_id:int,i_category_id:int>

(5) ColumnarToRow [codegen id : 11]
Input [4]: [i_item_sk#5, i_brand_id#6, i_class_id#7, i_category_id#8]

(6) Filter [codegen id : 11]
Input [4]: [i_item_sk#5, i_brand_id#6, i_class_id#7, i_category_id#8]
-Condition : ((isnotnull(i_class_id#7) AND isnotnull(i_brand_id#6)) AND isnotnull(i_category_id#8))
+Condition : (((isnotnull(i_class_id#7) AND isnotnull(i_brand_id#6)) AND isnotnull(i_category_id#8)) AND isnotnull(i_item_sk#5))

(7) Scan parquet default.store_sales
Output [2]: [ss_sold_date_sk#1, ss_item_sk#2]
@@ -72,7 +72,7 @@ TakeOrderedAndProject [channel,i_brand_id,i_category_id,i_class_id,sum(number_sa
WholeStageCodegen (11)
Project [i_item_sk]
BroadcastHashJoin [brand_id,category_id,class_id,i_brand_id,i_category_id,i_class_id]
-Filter [i_brand_id,i_category_id,i_class_id]
+Filter [i_brand_id,i_category_id,i_class_id,i_item_sk]
ColumnarToRow
InputAdapter
Scan parquet default.item [i_brand_id,i_category_id,i_class_id,i_item_sk]
@@ -137,15 +137,15 @@ Arguments: [ss_item_sk#2 ASC NULLS FIRST], false, 0
Output [4]: [i_item_sk#6, i_brand_id#7, i_class_id#8, i_category_id#9]
Batched: true
Location: InMemoryFileIndex [file:/Users/yi.wu/IdeaProjects/spark/sql/core/spark-warehouse/org.apache.spark.sql.TPCDSV1_4_PlanStabilityWithStatsSuite/item]
-PushedFilters: [IsNotNull(i_class_id), IsNotNull(i_brand_id), IsNotNull(i_category_id)]
+PushedFilters: [IsNotNull(i_class_id), IsNotNull(i_brand_id), IsNotNull(i_category_id), IsNotNull(i_item_sk)]
ReadSchema: struct<i_item_sk:int,i_brand_id:int,i_class_id:int,i_category_id:int>

(7) ColumnarToRow [codegen id : 17]
Input [4]: [i_item_sk#6, i_brand_id#7, i_class_id#8, i_category_id#9]

(8) Filter [codegen id : 17]
Input [4]: [i_item_sk#6, i_brand_id#7, i_class_id#8, i_category_id#9]
-Condition : ((isnotnull(i_class_id#8) AND isnotnull(i_brand_id#7)) AND isnotnull(i_category_id#9))
+Condition : (((isnotnull(i_class_id#8) AND isnotnull(i_brand_id#7)) AND isnotnull(i_category_id#9)) AND isnotnull(i_item_sk#6))

(9) Scan parquet default.store_sales
Output [2]: [ss_sold_date_sk#1, ss_item_sk#2]
@@ -72,7 +72,7 @@ TakeOrderedAndProject [channel,channel,i_brand_id,i_brand_id,i_category_id,i_cat
WholeStageCodegen (17)
Project [i_item_sk]
BroadcastHashJoin [brand_id,category_id,class_id,i_brand_id,i_category_id,i_class_id]
-Filter [i_brand_id,i_category_id,i_class_id]
+Filter [i_brand_id,i_category_id,i_class_id,i_item_sk]
ColumnarToRow
InputAdapter
Scan parquet default.item [i_brand_id,i_category_id,i_class_id,i_item_sk]
@@ -119,15 +119,15 @@ Condition : (isnotnull(ss_item_sk#2) AND isnotnull(ss_sold_date_sk#1))
Output [4]: [i_item_sk#5, i_brand_id#6, i_class_id#7, i_category_id#8]
Batched: true
Location: InMemoryFileIndex [file:/Users/yi.wu/IdeaProjects/spark/sql/core/spark-warehouse/org.apache.spark.sql.TPCDSV1_4_PlanStabilitySuite/item]
-PushedFilters: [IsNotNull(i_class_id), IsNotNull(i_brand_id), IsNotNull(i_category_id)]
+PushedFilters: [IsNotNull(i_class_id), IsNotNull(i_brand_id), IsNotNull(i_category_id), IsNotNull(i_item_sk)]
ReadSchema: struct<i_item_sk:int,i_brand_id:int,i_class_id:int,i_category_id:int>

(5) ColumnarToRow [codegen id : 11]
Input [4]: [i_item_sk#5, i_brand_id#6, i_class_id#7, i_category_id#8]

(6) Filter [codegen id : 11]
Input [4]: [i_item_sk#5, i_brand_id#6, i_class_id#7, i_category_id#8]
-Condition : ((isnotnull(i_class_id#7) AND isnotnull(i_brand_id#6)) AND isnotnull(i_category_id#8))
+Condition : (((isnotnull(i_class_id#7) AND isnotnull(i_brand_id#6)) AND isnotnull(i_category_id#8)) AND isnotnull(i_item_sk#5))

(7) Scan parquet default.store_sales
Output [2]: [ss_sold_date_sk#1, ss_item_sk#2]
@@ -64,7 +64,7 @@ TakeOrderedAndProject [channel,channel,i_brand_id,i_brand_id,i_category_id,i_cat
WholeStageCodegen (11)
Project [i_item_sk]
BroadcastHashJoin [brand_id,category_id,class_id,i_brand_id,i_category_id,i_class_id]
-Filter [i_brand_id,i_category_id,i_class_id]
+Filter [i_brand_id,i_category_id,i_class_id,i_item_sk]
ColumnarToRow
InputAdapter
Scan parquet default.item [i_brand_id,i_category_id,i_class_id,i_item_sk]