diff --git a/tsl/src/nodes/decompress_chunk/vector_quals.h b/tsl/src/nodes/decompress_chunk/vector_quals.h
index 4fd2e058e4a..978fc55fd1a 100644
--- a/tsl/src/nodes/decompress_chunk/vector_quals.h
+++ b/tsl/src/nodes/decompress_chunk/vector_quals.h
@@ -24,11 +24,13 @@ typedef struct VectorQualInfo
 	 */
 	Index rti;
 
+	bool reverse;
 	/*
-	 * Array indexed by uncompressed attno indicating whether an
-	 * attribute/column is a vectorizable type.
+	 * Arrays indexed by uncompressed attno indicating whether an
+	 * attribute/column is a vectorizable type and/or a segmentby attribute.
 	 */
 	bool *vector_attrs;
+	bool *segmentby_attrs;
 } VectorQualInfo;
 
 /*
diff --git a/tsl/src/nodes/vector_agg/CMakeLists.txt b/tsl/src/nodes/vector_agg/CMakeLists.txt
index cf69755c077..c38b0aa74a1 100644
--- a/tsl/src/nodes/vector_agg/CMakeLists.txt
+++ b/tsl/src/nodes/vector_agg/CMakeLists.txt
@@ -4,5 +4,6 @@ set(SOURCES
     ${CMAKE_CURRENT_SOURCE_DIR}/exec.c
     ${CMAKE_CURRENT_SOURCE_DIR}/grouping_policy_batch.c
     ${CMAKE_CURRENT_SOURCE_DIR}/grouping_policy_hash.c
-    ${CMAKE_CURRENT_SOURCE_DIR}/plan.c)
+    ${CMAKE_CURRENT_SOURCE_DIR}/plan.c
+    ${CMAKE_CURRENT_SOURCE_DIR}/plan_decompress_chunk.c)
 target_sources(${TSL_LIBRARY_NAME} PRIVATE ${SOURCES})
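
The two per-attno arrays in VectorQualInfo above are the contract every consumer relies on. A minimal standalone sketch (plain C, with the PostgreSQL typedefs mocked out; the helper name is illustrative and not part of the patch) of how a caller indexes them by uncompressed attribute number:

    #include <stdbool.h>
    #include <stdio.h>

    typedef unsigned int Index;

    typedef struct VectorQualInfo
    {
        Index rti;
        bool reverse;
        bool *vector_attrs;    /* indexed by uncompressed attno */
        bool *segmentby_attrs; /* indexed by uncompressed attno */
    } VectorQualInfo;

    /* attno <= 0 covers InvalidAttrNumber and system columns like tableoid */
    static bool
    attno_is_vector(const VectorQualInfo *vqi, int attno)
    {
        return attno > 0 && vqi->vector_attrs[attno];
    }

    int
    main(void)
    {
        bool vec[] = { false, true, true };  /* attnos 1 and 2 are vectorizable */
        bool seg[] = { false, true, false }; /* attno 1 is a segmentby column */
        VectorQualInfo vqi = { .rti = 1, .vector_attrs = vec, .segmentby_attrs = seg };
        printf("attno 2: vector=%d segmentby=%d\n",
               attno_is_vector(&vqi, 2), vqi.segmentby_attrs[2]);
        return 0;
    }
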
diff --git a/tsl/src/nodes/vector_agg/plan.c b/tsl/src/nodes/vector_agg/plan.c
index fc6a983483e..a858bf3a2e4 100644
--- a/tsl/src/nodes/vector_agg/plan.c
+++ b/tsl/src/nodes/vector_agg/plan.c
@@ -3,21 +3,21 @@
  * Please see the included NOTICE for copyright information and
  * LICENSE-TIMESCALE for a copy of the license.
  */
-#include 
-
+#include 
 #include 
 #include 
 #include 
 #include 
 #include 
+#include 
+#include 
 #include 
 
 #include "plan.h"
 
 #include "exec.h"
 #include "import/list.h"
-#include "nodes/decompress_chunk/planner.h"
 #include "nodes/decompress_chunk/vector_quals.h"
 #include "nodes/vector_agg.h"
 #include "utils.h"
@@ -82,7 +82,7 @@ resolve_outer_special_vars_mutator(Node *node, void *context)
 	{
 		/*
 		 * This is already the uncompressed chunk var. We can see it referenced
-		 * by expressions in the output targetlist of DecompressChunk node.
+		 * by expressions in the output targetlist of the child scan node.
 		 */
 		return (Node *) copyObject(var);
 	}
@@ -90,7 +90,7 @@ resolve_outer_special_vars_mutator(Node *node, void *context)
 	if (var->varno == OUTER_VAR)
 	{
 		/*
-		 * Reference into the output targetlist of the DecompressChunk node.
+		 * Reference into the output targetlist of the child scan node.
 		 */
 		TargetEntry *decompress_chunk_tentry =
 			castNode(TargetEntry, list_nth(custom->scan.plan.targetlist, var->varattno - 1));
@@ -122,9 +122,9 @@ resolve_outer_special_vars_mutator(Node *node, void *context)
  * variables.
  */
 static List *
-resolve_outer_special_vars(List *agg_tlist, CustomScan *custom)
+resolve_outer_special_vars(List *agg_tlist, Plan *childplan)
 {
-	return castNode(List, resolve_outer_special_vars_mutator((Node *) agg_tlist, custom));
+	return castNode(List, resolve_outer_special_vars_mutator((Node *) agg_tlist, childplan));
 }
 
 /*
@@ -132,11 +132,11 @@ resolve_outer_special_vars(List *agg_tlist, CustomScan *custom)
  * node.
 */
 static Plan *
-vector_agg_plan_create(Agg *agg, CustomScan *decompress_chunk, List *resolved_targetlist,
+vector_agg_plan_create(Plan *childplan, Agg *agg, List *resolved_targetlist,
					   VectorAggGroupingType grouping_type)
 {
 	CustomScan *vector_agg = (CustomScan *) makeNode(CustomScan);
-	vector_agg->custom_plans = list_make1(decompress_chunk);
+	vector_agg->custom_plans = list_make1(childplan);
 	vector_agg->methods = &scan_methods;
 
 	vector_agg->custom_scan_tlist = resolved_targetlist;
@@ -161,7 +161,7 @@ vector_agg_plan_create(Agg *agg, CustomScan *decompress_chunk, List *resolved_ta
 	vector_agg->scan.plan.total_cost = agg->plan.total_cost;
 
 	vector_agg->scan.plan.parallel_aware = false;
-	vector_agg->scan.plan.parallel_safe = decompress_chunk->scan.plan.parallel_safe;
+	vector_agg->scan.plan.parallel_safe = childplan->parallel_safe;
 	vector_agg->scan.plan.async_capable = false;
 
 	vector_agg->scan.plan.plan_node_id = agg->plan.plan_node_id;
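
vector_agg_plan_create() now takes the child as a plain Plan * because every PostgreSQL plan node embeds Plan as its first member, so common fields such as parallel_safe are readable without knowing the concrete node type. A toy mock of that layout (not the real PostgreSQL definitions, just the shape):

    #include <stdbool.h>
    #include <stdio.h>

    /* Mock of PostgreSQL's node layout: every plan node embeds Plan first. */
    typedef struct Plan
    {
        bool parallel_safe;
    } Plan;

    typedef struct Scan
    {
        Plan plan;
    } Scan;

    typedef struct CustomScan
    {
        Scan scan;
    } CustomScan;

    /* Works for any child node type, not just CustomScan. */
    static bool
    child_parallel_safe(const Plan *childplan)
    {
        return childplan->parallel_safe;
    }

    int
    main(void)
    {
        CustomScan decompress = { .scan = { .plan = { .parallel_safe = true } } };
        printf("parallel_safe=%d\n", child_parallel_safe(&decompress.scan.plan));
        return 0;
    }
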
@@ -180,75 +180,12 @@ vector_agg_plan_create(Agg *agg, CustomScan *decompress_chunk, List *resolved_ta
 	return (Plan *) vector_agg;
 }
 
-/*
- * Map the custom scan attribute number to the uncompressed chunk attribute
- * number.
- */
-static int
-custom_scan_to_uncompressed_chunk_attno(List *custom_scan_tlist, int custom_scan_attno)
-{
-	if (custom_scan_tlist == NIL)
-	{
-		return custom_scan_attno;
-	}
-
-	Var *var =
-		castNode(Var,
-				 castNode(TargetEntry,
-						  list_nth(custom_scan_tlist, AttrNumberGetAttrOffset(custom_scan_attno)))
-					 ->expr);
-	return var->varattno;
-}
-
-/*
- * Whether the given compressed column index corresponds to a vector variable.
- */
-static bool
-is_vector_compressed_column(CustomScan *custom, int compressed_column_index, bool *out_is_segmentby)
-{
-	List *bulk_decompression_column = list_nth(custom->custom_private, DCP_BulkDecompressionColumn);
-	const bool bulk_decompression_enabled_for_column =
-		list_nth_int(bulk_decompression_column, compressed_column_index);
-
-	/*
-	 * Bulk decompression can be disabled for all columns in the DecompressChunk
-	 * node settings, we can't do vectorized aggregation for compressed columns
-	 * in that case. For segmentby columns it's still possible.
-	 */
-	List *settings = linitial(custom->custom_private);
-	const bool bulk_decompression_enabled_globally =
-		list_nth_int(settings, DCS_EnableBulkDecompression);
-
-	/*
-	 * Check if this column is a segmentby.
-	 */
-	List *is_segmentby_column = list_nth(custom->custom_private, DCP_IsSegmentbyColumn);
-	const bool is_segmentby = list_nth_int(is_segmentby_column, compressed_column_index);
-	if (out_is_segmentby)
-	{
-		*out_is_segmentby = is_segmentby;
-	}
-
-	/*
-	 * We support vectorized aggregation either for segmentby columns or for
-	 * columns with bulk decompression enabled.
-	 */
-	if (!is_segmentby &&
-		!(bulk_decompression_enabled_for_column && bulk_decompression_enabled_globally))
-	{
-		/* Vectorized aggregation not possible for this particular column. */
-		return false;
-	}
-
-	return true;
-}
-
 /*
  * Whether the expression can be used for vectorized processing: must be a Var
  * that refers to either a bulk-decompressed or a segmentby column.
  */
 static bool
-is_vector_var(CustomScan *custom, Expr *expr, bool *out_is_segmentby)
+is_vector_var(const VectorQualInfo *vqinfo, Expr *expr)
 {
 	if (!IsA(expr, Var))
 	{
@@ -256,118 +193,22 @@ is_vector_var(CustomScan *custom, Expr *expr, bool *out_is_segmentby)
 		return false;
 	}
 
-	Var *decompressed_var = castNode(Var, expr);
+	Var *var = castNode(Var, expr);
 
-	/*
-	 * This must be called after resolve_outer_special_vars(), so we should only
-	 * see the uncompressed chunk variables here.
-	 */
-	Ensure((Index) decompressed_var->varno == (Index) custom->scan.scanrelid,
-		   "expected scan varno %d got %d",
-		   custom->scan.scanrelid,
-		   decompressed_var->varno);
-
-	if (decompressed_var->varattno <= 0)
+	if (var->varattno <= 0)
 	{
 		/* Can't work with special attributes like tableoid. */
-		if (out_is_segmentby)
-		{
-			*out_is_segmentby = false;
-		}
 		return false;
 	}
 
-	/*
-	 * Now, we have to translate the decompressed varno into the compressed
-	 * column index, to check if the column supports bulk decompression.
-	 */
-	List *decompression_map = list_nth(custom->custom_private, DCP_DecompressionMap);
-	int compressed_column_index = 0;
-	for (; compressed_column_index < list_length(decompression_map); compressed_column_index++)
-	{
-		const int custom_scan_attno = list_nth_int(decompression_map, compressed_column_index);
-		if (custom_scan_attno <= 0)
-		{
-			continue;
-		}
-
-		const int uncompressed_chunk_attno =
-			custom_scan_to_uncompressed_chunk_attno(custom->custom_scan_tlist, custom_scan_attno);
-
-		if (uncompressed_chunk_attno == decompressed_var->varattno)
-		{
-			break;
-		}
-	}
-	Ensure(compressed_column_index < list_length(decompression_map), "compressed column not found");
-
-	return is_vector_compressed_column(custom, compressed_column_index, out_is_segmentby);
-}
-
-/*
- * Build supplementary info to determine whether we can vectorize the
- * aggregate FILTER clauses.
- */
-static VectorQualInfo
-build_aggfilter_vector_qual_info(CustomScan *custom)
-{
-	VectorQualInfo vqi = { .rti = custom->scan.scanrelid };
-
-	/*
-	 * Now, we have to translate the decompressed varno into the compressed
-	 * column index, to check if the column supports bulk decompression.
-	 */
-	List *decompression_map = list_nth(custom->custom_private, DCP_DecompressionMap);
-
-	/*
-	 * There's no easy way to determine maximum attribute number for uncompressed
-	 * chunk at this stage, so we'll have to go through all the compressed columns
-	 * for this.
-	 */
-	int maxattno = 0;
-	for (int compressed_column_index = 0; compressed_column_index < list_length(decompression_map);
-		 compressed_column_index++)
-	{
-		const int custom_scan_attno = list_nth_int(decompression_map, compressed_column_index);
-		if (custom_scan_attno <= 0)
-		{
-			continue;
-		}
-
-		const int uncompressed_chunk_attno =
-			custom_scan_to_uncompressed_chunk_attno(custom->custom_scan_tlist, custom_scan_attno);
-
-		if (uncompressed_chunk_attno > maxattno)
-		{
-			maxattno = uncompressed_chunk_attno;
-		}
-	}
-
-	vqi.vector_attrs = (bool *) palloc0(sizeof(bool) * (maxattno + 1));
-
-	for (int compressed_column_index = 0; compressed_column_index < list_length(decompression_map);
-		 compressed_column_index++)
-	{
-		const int custom_scan_attno = list_nth_int(decompression_map, compressed_column_index);
-		if (custom_scan_attno <= 0)
-		{
-			continue;
-		}
-
-		const int uncompressed_chunk_attno =
-			custom_scan_to_uncompressed_chunk_attno(custom->custom_scan_tlist, custom_scan_attno);
-
-		vqi.vector_attrs[uncompressed_chunk_attno] =
-			is_vector_compressed_column(custom, compressed_column_index, NULL);
-	}
-
-	return vqi;
+	return vqinfo->vector_attrs[var->varattno];
 }
 
 /*
  * Whether we can vectorize this particular aggregate.
  */
 static bool
-can_vectorize_aggref(Aggref *aggref, CustomScan *custom, VectorQualInfo *vqi)
+can_vectorize_aggref(const VectorQualInfo *vqi, Aggref *aggref)
 {
 	if (aggref->aggdirectargs != NIL)
 	{
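
The rewritten is_vector_var() replaces a per-call walk over the decompression map with a single lookup into the precomputed per-attno array. A toy comparison of the two shapes (the names and data here are hypothetical stand-ins, not the planner structures):

    #include <stdbool.h>
    #include <stdio.h>

    /* Old shape: linear search through a per-column map on every call. */
    static bool
    is_vector_var_search(const int *map, int nmap, const bool *is_vector, int attno)
    {
        for (int i = 0; i < nmap; i++)
        {
            if (map[i] == attno)
                return is_vector[i];
        }
        return false;
    }

    /* New shape: one precomputed bool per uncompressed attno. */
    static bool
    is_vector_var_lookup(const bool *vector_attrs, int attno)
    {
        return attno > 0 && vector_attrs[attno];
    }

    int
    main(void)
    {
        const int map[] = { 1, 3 };
        const bool is_vector[] = { true, false };
        const bool vector_attrs[] = { false, true, false, false };
        printf("%d %d\n",
               is_vector_var_search(map, 2, is_vector, 1),
               is_vector_var_lookup(vector_attrs, 1));
        return 0;
    }
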
- */ - int maxattno = 0; - for (int compressed_column_index = 0; compressed_column_index < list_length(decompression_map); - compressed_column_index++) - { - const int custom_scan_attno = list_nth_int(decompression_map, compressed_column_index); - if (custom_scan_attno <= 0) - { - continue; - } - - const int uncompressed_chunk_attno = - custom_scan_to_uncompressed_chunk_attno(custom->custom_scan_tlist, custom_scan_attno); - - if (uncompressed_chunk_attno > maxattno) - { - maxattno = uncompressed_chunk_attno; - } - } - - vqi.vector_attrs = (bool *) palloc0(sizeof(bool) * (maxattno + 1)); - - for (int compressed_column_index = 0; compressed_column_index < list_length(decompression_map); - compressed_column_index++) - { - const int custom_scan_attno = list_nth_int(decompression_map, compressed_column_index); - if (custom_scan_attno <= 0) - { - continue; - } - - const int uncompressed_chunk_attno = - custom_scan_to_uncompressed_chunk_attno(custom->custom_scan_tlist, custom_scan_attno); - - vqi.vector_attrs[uncompressed_chunk_attno] = - is_vector_compressed_column(custom, compressed_column_index, NULL); - } - - return vqi; + return vqinfo->vector_attrs[var->varattno]; } /* * Whether we can vectorize this particular aggregate. */ static bool -can_vectorize_aggref(Aggref *aggref, CustomScan *custom, VectorQualInfo *vqi) +can_vectorize_aggref(const VectorQualInfo *vqi, Aggref *aggref) { if (aggref->aggdirectargs != NIL) { @@ -416,19 +257,15 @@ can_vectorize_aggref(Aggref *aggref, CustomScan *custom, VectorQualInfo *vqi) /* The function must have one argument, check it. */ Assert(list_length(aggref->args) == 1); TargetEntry *argument = castNode(TargetEntry, linitial(aggref->args)); - if (!is_vector_var(custom, argument->expr, NULL)) - { - return false; - } - return true; + return is_vector_var(vqi, argument->expr); } /* * What vectorized grouping strategy we can use for the given grouping columns. */ static VectorAggGroupingType -get_vectorized_grouping_type(Agg *agg, CustomScan *custom, List *resolved_targetlist) +get_vectorized_grouping_type(const VectorQualInfo *vqinfo, Agg *agg, List *resolved_targetlist) { /* * The Agg->numCols value can be less than the number of the non-aggregated @@ -462,14 +299,11 @@ get_vectorized_grouping_type(Agg *agg, CustomScan *custom, List *resolved_target num_grouping_columns++; - Var *var = castNode(Var, target_entry->expr); - bool is_segmentby; - if (!is_vector_var(custom, (Expr *) var, &is_segmentby)) - { + if (!is_vector_var(vqinfo, target_entry->expr)) return VAGT_Invalid; - } - all_segmentby &= is_segmentby; + Var *var = castNode(Var, target_entry->expr); + all_segmentby &= vqinfo->segmentby_attrs[var->varattno]; /* * If we have a single grouping column, record it for the additional @@ -512,6 +346,7 @@ get_vectorized_grouping_type(Agg *agg, CustomScan *custom, List *resolved_target { int16 typlen; bool typbyval; + get_typlenbyval(single_grouping_var->vartype, &typlen, &typbyval); if (typbyval) { @@ -605,21 +440,50 @@ has_vector_agg_node(Plan *plan, bool *has_normal_agg) return false; } +/* + * Check if a VectorAgg is possible on top of the given child plan. + * + * If the child plan is compatible, also initialize the VectorQualInfo struct + * for aggregation FILTER clauses. + * + * Returns true if the scan node is a supported child, otherwise false. 
+ */ +static bool +vectoragg_plan_possible(Plan *childplan, const List *rtable, VectorQualInfo *vqi) +{ + if (!IsA(childplan, CustomScan)) + return false; + + if (childplan->qual != NIL) + { + /* Can't do vectorized aggregation if we have Postgres quals. */ + return false; + } + + CustomScan *customscan = castNode(CustomScan, childplan); + bool vectoragg_possible = false; + + if (strcmp(customscan->methods->CustomName, "DecompressChunk") == 0) + vectoragg_possible = vectoragg_plan_decompress_chunk(childplan, vqi); + + return vectoragg_possible; +} + /* * Where possible, replace the partial aggregation plan nodes with our own * vectorized aggregation node. The replacement is done in-place. */ Plan * -try_insert_vector_agg_node(Plan *plan) +try_insert_vector_agg_node(Plan *plan, List *rtable) { if (plan->lefttree) { - plan->lefttree = try_insert_vector_agg_node(plan->lefttree); + plan->lefttree = try_insert_vector_agg_node(plan->lefttree, rtable); } if (plan->righttree) { - plan->righttree = try_insert_vector_agg_node(plan->righttree); + plan->righttree = try_insert_vector_agg_node(plan->righttree, rtable); } List *append_plans = NIL; @@ -650,7 +514,7 @@ try_insert_vector_agg_node(Plan *plan) ListCell *lc; foreach (lc, append_plans) { - lfirst(lc) = try_insert_vector_agg_node(lfirst(lc)); + lfirst(lc) = try_insert_vector_agg_node(lfirst(lc), rtable); } return plan; } @@ -693,26 +557,16 @@ try_insert_vector_agg_node(Plan *plan) return plan; } - if (!IsA(agg->plan.lefttree, CustomScan)) - { - /* - * Should have a Custom Scan under aggregation. - */ - return plan; - } - - CustomScan *custom = castNode(CustomScan, agg->plan.lefttree); - if (strcmp(custom->methods->CustomName, "DecompressChunk") != 0) - { - /* - * It should be our DecompressChunk node. - */ - return plan; - } + Plan *childplan = agg->plan.lefttree; + VectorQualInfo vqi; - if (custom->scan.plan.qual != NIL) + /* + * Build supplementary info to determine whether we can vectorize the + * aggregate FILTER clauses. + */ + if (!vectoragg_plan_possible(childplan, rtable, &vqi)) { - /* Can't do vectorized aggregation if we have Postgres quals. */ + /* Not a compatible vectoragg child node */ return plan; } @@ -721,10 +575,10 @@ try_insert_vector_agg_node(Plan *plan) * the subsequent checks are performed on the aggregated targetlist with * all variables resolved to uncompressed chunk variables. */ - List *resolved_targetlist = resolve_outer_special_vars(agg->plan.targetlist, custom); + List *resolved_targetlist = resolve_outer_special_vars(agg->plan.targetlist, childplan); const VectorAggGroupingType grouping_type = - get_vectorized_grouping_type(agg, custom, resolved_targetlist); + get_vectorized_grouping_type(&vqi, agg, resolved_targetlist); if (grouping_type == VAGT_Invalid) { /* The grouping is not vectorizable. */ @@ -738,20 +592,12 @@ try_insert_vector_agg_node(Plan *plan) */ if (grouping_type != VAGT_Batch && agg->aggstrategy != AGG_HASHED) { - List *settings = linitial(custom->custom_private); - const bool reverse = list_nth_int(settings, DCS_Reverse); - if (reverse) + if (vqi.reverse) { return plan; } } - /* - * Build supplementary info to determine whether we can vectorize the - * aggregate FILTER clauses. - */ - VectorQualInfo vqi = build_aggfilter_vector_qual_info(custom); - /* Now check the output targetlist. 
@@ -693,26 +557,16 @@ try_insert_vector_agg_node(Plan *plan)
 		return plan;
 	}
 
-	if (!IsA(agg->plan.lefttree, CustomScan))
-	{
-		/*
-		 * Should have a Custom Scan under aggregation.
-		 */
-		return plan;
-	}
-
-	CustomScan *custom = castNode(CustomScan, agg->plan.lefttree);
-	if (strcmp(custom->methods->CustomName, "DecompressChunk") != 0)
-	{
-		/*
-		 * It should be our DecompressChunk node.
-		 */
-		return plan;
-	}
+	Plan *childplan = agg->plan.lefttree;
+	VectorQualInfo vqi;
 
-	if (custom->scan.plan.qual != NIL)
+	/*
+	 * Build supplementary info to determine whether we can vectorize the
+	 * aggregate FILTER clauses.
+	 */
+	if (!vectoragg_plan_possible(childplan, rtable, &vqi))
 	{
-		/* Can't do vectorized aggregation if we have Postgres quals. */
+		/* Not a compatible vectoragg child node */
 		return plan;
 	}
@@ -721,10 +575,10 @@ try_insert_vector_agg_node(Plan *plan)
 	 * the subsequent checks are performed on the aggregated targetlist with
 	 * all variables resolved to uncompressed chunk variables.
 	 */
-	List *resolved_targetlist = resolve_outer_special_vars(agg->plan.targetlist, custom);
+	List *resolved_targetlist = resolve_outer_special_vars(agg->plan.targetlist, childplan);
 
 	const VectorAggGroupingType grouping_type =
-		get_vectorized_grouping_type(agg, custom, resolved_targetlist);
+		get_vectorized_grouping_type(&vqi, agg, resolved_targetlist);
 	if (grouping_type == VAGT_Invalid)
 	{
 		/* The grouping is not vectorizable. */
@@ -738,20 +592,12 @@ try_insert_vector_agg_node(Plan *plan)
 	 */
 	if (grouping_type != VAGT_Batch && agg->aggstrategy != AGG_HASHED)
 	{
-		List *settings = linitial(custom->custom_private);
-		const bool reverse = list_nth_int(settings, DCS_Reverse);
-		if (reverse)
+		if (vqi.reverse)
 		{
 			return plan;
 		}
 	}
 
-	/*
-	 * Build supplementary info to determine whether we can vectorize the
-	 * aggregate FILTER clauses.
-	 */
-	VectorQualInfo vqi = build_aggfilter_vector_qual_info(custom);
-
 	/* Now check the output targetlist. */
 	ListCell *lc;
 	foreach (lc, resolved_targetlist)
 	{
 		TargetEntry *target_entry = lfirst_node(TargetEntry, lc);
 		if (IsA(target_entry->expr, Aggref))
 		{
 			Aggref *aggref = castNode(Aggref, target_entry->expr);
-			if (!can_vectorize_aggref(aggref, custom, &vqi))
+			if (!can_vectorize_aggref(&vqi, aggref))
 			{
 				/* Aggregate function not vectorizable. */
 				return plan;
@@ -768,7 +614,7 @@ try_insert_vector_agg_node(Plan *plan)
 		}
 		else if (IsA(target_entry->expr, Var))
 		{
-			if (!is_vector_var(custom, target_entry->expr, NULL))
+			if (!is_vector_var(&vqi, target_entry->expr))
 			{
 				/* Variable not vectorizable. */
 				return plan;
@@ -789,5 +635,5 @@ try_insert_vector_agg_node(Plan *plan)
 	 * Finally, all requirements are satisfied and we can vectorize this partial
 	 * aggregation node.
 	 */
-	return vector_agg_plan_create(agg, custom, resolved_targetlist, grouping_type);
+	return vector_agg_plan_create(childplan, agg, resolved_targetlist, grouping_type);
 }
diff --git a/tsl/src/nodes/vector_agg/plan.h b/tsl/src/nodes/vector_agg/plan.h
index 94dabb35d6d..1effeb5a912 100644
--- a/tsl/src/nodes/vector_agg/plan.h
+++ b/tsl/src/nodes/vector_agg/plan.h
@@ -3,10 +3,11 @@
  * Please see the included NOTICE for copyright information and
  * LICENSE-TIMESCALE for a copy of the license.
  */
-#include 
-
 #include 
+#include 
+
+#include "nodes/decompress_chunk/vector_quals.h"
 
 typedef struct VectorAggPlan
 {
@@ -23,6 +24,7 @@ typedef enum
 } VectorAggSettingsIndex;
 
 extern void _vector_agg_init(void);
+extern bool vectoragg_plan_decompress_chunk(Plan *childplan, VectorQualInfo *vqi);
 
-Plan *try_insert_vector_agg_node(Plan *plan);
+Plan *try_insert_vector_agg_node(Plan *plan, List *rtable);
 bool has_vector_agg_node(Plan *plan, bool *has_normal_agg);
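
vectoragg_plan_possible() dispatches on the CustomScan's CustomName, and plan.h now exports vectoragg_plan_decompress_chunk() as the first such handler. One way further child scan types could slot in is a table of name/handler pairs; this is a speculative sketch, and the ColumnarScan entry is purely hypothetical:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdio.h>
    #include <string.h>

    typedef struct VectorQualInfo VectorQualInfo; /* opaque for this sketch */

    typedef bool (*vectoragg_plan_fn)(void *childplan, VectorQualInfo *vqi);

    static bool
    build_decompress_chunk(void *childplan, VectorQualInfo *vqi)
    {
        (void) childplan;
        (void) vqi;
        return true; /* stand-in for vectoragg_plan_decompress_chunk() */
    }

    /* Table-driven form of the strcmp() chain; adding a child type is one row. */
    static const struct
    {
        const char *custom_name;
        vectoragg_plan_fn build;
    } dispatch[] = {
        { "DecompressChunk", build_decompress_chunk },
        /* { "ColumnarScan", build_columnar_scan },  hypothetical future entry */
    };

    int
    main(void)
    {
        const char *name = "DecompressChunk";
        for (size_t i = 0; i < sizeof(dispatch) / sizeof(dispatch[0]); i++)
        {
            if (strcmp(dispatch[i].custom_name, name) == 0)
                printf("%s -> possible=%d\n", name, dispatch[i].build(NULL, NULL));
        }
        return 0;
    }
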
+ */ + if (!is_segmentby && + !(bulk_decompression_enabled_for_column && bulk_decompression_enabled_globally)) + { + /* Vectorized aggregation not possible for this particular column. */ + return false; + } + + return true; +} + +/* + * Map the custom scan attribute number to the uncompressed chunk attribute + * number. + */ +static int +custom_scan_to_uncompressed_chunk_attno(List *custom_scan_tlist, int custom_scan_attno) +{ + if (custom_scan_tlist == NIL) + { + return custom_scan_attno; + } + + Var *var = + castNode(Var, + castNode(TargetEntry, + list_nth(custom_scan_tlist, AttrNumberGetAttrOffset(custom_scan_attno))) + ->expr); + return var->varattno; +} + +bool +vectoragg_plan_decompress_chunk(Plan *childplan, VectorQualInfo *vqi) +{ + const CustomScan *custom = castNode(CustomScan, childplan); + + vqi->rti = custom->scan.scanrelid; + + /* + * Now, we have to translate the decompressed varno into the compressed + * column index, to check if the column supports bulk decompression. + */ + List *decompression_map = list_nth(custom->custom_private, DCP_DecompressionMap); + + /* + * There's no easy way to determine maximum attribute number for uncompressed + * chunk at this stage, so we'll have to go through all the compressed columns + * for this. + */ + int maxattno = 0; + for (int compressed_column_index = 0; compressed_column_index < list_length(decompression_map); + compressed_column_index++) + { + const int custom_scan_attno = list_nth_int(decompression_map, compressed_column_index); + if (custom_scan_attno <= 0) + { + continue; + } + + const int uncompressed_chunk_attno = + custom_scan_to_uncompressed_chunk_attno(custom->custom_scan_tlist, custom_scan_attno); + + if (uncompressed_chunk_attno > maxattno) + { + maxattno = uncompressed_chunk_attno; + } + } + + vqi->vector_attrs = (bool *) palloc0(sizeof(bool) * (maxattno + 1)); + vqi->segmentby_attrs = (bool *) palloc0(sizeof(bool) * (maxattno + 1)); + + for (int compressed_column_index = 0; compressed_column_index < list_length(decompression_map); + compressed_column_index++) + { + const int custom_scan_attno = list_nth_int(decompression_map, compressed_column_index); + if (custom_scan_attno <= 0) + { + continue; + } + + const int uncompressed_chunk_attno = + custom_scan_to_uncompressed_chunk_attno(custom->custom_scan_tlist, custom_scan_attno); + + vqi->vector_attrs[uncompressed_chunk_attno] = + is_vector_compressed_column(custom, + compressed_column_index, + &vqi->segmentby_attrs[uncompressed_chunk_attno]); + } + + List *settings = linitial(custom->custom_private); + vqi->reverse = list_nth_int(settings, DCS_Reverse); + + return vqi; +} diff --git a/tsl/src/planner.c b/tsl/src/planner.c index 8160e78ee4c..5cc0aef0640 100644 --- a/tsl/src/planner.c +++ b/tsl/src/planner.c @@ -234,7 +234,7 @@ tsl_postprocess_plan(PlannedStmt *stmt) { if (ts_guc_enable_vectorized_aggregation) { - stmt->planTree = try_insert_vector_agg_node(stmt->planTree); + stmt->planTree = try_insert_vector_agg_node(stmt->planTree, stmt->rtable); } #ifdef TS_DEBUG diff --git a/tsl/test/expected/vector_agg_groupagg.out b/tsl/test/expected/vector_agg_groupagg.out index 85a4a11264e..0d2e7de70a8 100644 --- a/tsl/test/expected/vector_agg_groupagg.out +++ b/tsl/test/expected/vector_agg_groupagg.out @@ -122,7 +122,7 @@ select count(compress_chunk(x)) from show_chunks('text_table') x; 1 (1 row) -alter table text_table add column a text default 'default'; +alter table text_table add column a text collate "POSIX" default 'default'; alter table text_table set 
diff --git a/tsl/src/planner.c b/tsl/src/planner.c
index 8160e78ee4c..5cc0aef0640 100644
--- a/tsl/src/planner.c
+++ b/tsl/src/planner.c
@@ -234,7 +234,7 @@ tsl_postprocess_plan(PlannedStmt *stmt)
 {
 	if (ts_guc_enable_vectorized_aggregation)
 	{
-		stmt->planTree = try_insert_vector_agg_node(stmt->planTree);
+		stmt->planTree = try_insert_vector_agg_node(stmt->planTree, stmt->rtable);
 	}
 
 #ifdef TS_DEBUG
diff --git a/tsl/test/expected/vector_agg_groupagg.out b/tsl/test/expected/vector_agg_groupagg.out
index 85a4a11264e..0d2e7de70a8 100644
--- a/tsl/test/expected/vector_agg_groupagg.out
+++ b/tsl/test/expected/vector_agg_groupagg.out
@@ -122,7 +122,7 @@ select count(compress_chunk(x)) from show_chunks('text_table') x;
      1
 (1 row)
 
-alter table text_table add column a text default 'default';
+alter table text_table add column a text collate "POSIX" default 'default';
 alter table text_table set
 (timescaledb.compress, timescaledb.compress_segmentby = '', timescaledb.compress_orderby = 'a');
 insert into text_table select 1, '' from generate_series(1, 1000) x;
diff --git a/tsl/test/sql/vector_agg_groupagg.sql b/tsl/test/sql/vector_agg_groupagg.sql
index 6c04205d3e6..62e73a19e0b 100644
--- a/tsl/test/sql/vector_agg_groupagg.sql
+++ b/tsl/test/sql/vector_agg_groupagg.sql
@@ -61,7 +61,7 @@ alter table text_table set (timescaledb.compress);
 insert into text_table select 0 /*, default */ from generate_series(1, 1000) x;
 select count(compress_chunk(x)) from show_chunks('text_table') x;
 
-alter table text_table add column a text default 'default';
+alter table text_table add column a text collate "POSIX" default 'default';
 alter table text_table set (timescaledb.compress, timescaledb.compress_segmentby = '',
     timescaledb.compress_orderby = 'a');