diff --git a/executor/tiflashtest/BUILD.bazel b/executor/tiflashtest/BUILD.bazel index 00c3364890678..0af1c5394cfa7 100644 --- a/executor/tiflashtest/BUILD.bazel +++ b/executor/tiflashtest/BUILD.bazel @@ -9,7 +9,7 @@ go_test( ], flaky = True, race = "on", - shard_count = 38, + shard_count = 39, deps = [ "//config", "//domain", diff --git a/executor/tiflashtest/tiflash_test.go b/executor/tiflashtest/tiflash_test.go index 7a99bab37c86d..9c6b9084223a3 100644 --- a/executor/tiflashtest/tiflash_test.go +++ b/executor/tiflashtest/tiflash_test.go @@ -1738,3 +1738,35 @@ func TestMppStoreCntWithErrors(t *testing.T) { require.Nil(t, failpoint.Disable(mppStoreCountSetLastUpdateTimeP2)) require.Nil(t, failpoint.Disable(mppStoreCountPDError)) } + +func TestIndexMergeCarePreferTiflash(t *testing.T) { + store := testkit.CreateMockStore(t, withMockTiFlash(1)) + tk := testkit.NewTestKit(t, store) + tk.MustExec("use test") + + tk.MustExec("drop table if exists t") + tk.MustExec("CREATE TABLE `t` (" + "`i` bigint(20) NOT NULL, " + "`w` varchar(32) NOT NULL," + "`l` varchar(32) NOT NULL," + "`a` tinyint(4) NOT NULL DEFAULT '0'," + "`m` int(11) NOT NULL DEFAULT '0'," + "`s` int(11) NOT NULL DEFAULT '0'," + "PRIMARY KEY (`i`) /*T![clustered_index] NONCLUSTERED */," + "KEY `idx_win_user_site_code` (`w`,`m`)," + "KEY `idx_lose_user_site_code` (`l`,`m`)," + "KEY `idx_win_site_code_status` (`w`,`a`)," + "KEY `idx_lose_site_code_status` (`l`,`a`)" + ")") + tk.MustExec("alter table t set tiflash replica 1") + tb := external.GetTableByName(t, tk, "test", "t") + err := domain.GetDomain(tk.Session()).DDL().UpdateTableReplicaInfo(tk.Session(), tb.Meta().ID, true) + require.NoError(t, err) + tk.MustQuery("explain format=\"brief\" SELECT" + " /*+ read_from_storage(tiflash[a]) */ a.i FROM t a WHERE a.s = 0 AND a.a NOT IN (-1, 0) AND m >= 1726910326 AND m <= 1726910391 AND ( a.w IN ('1123') OR a.l IN ('1123'))").Check( + testkit.Rows("TableReader 0.00 root MppVersion: 1, data:ExchangeSender", + "└─ExchangeSender 0.00 mpp[tiflash] ExchangeType: PassThrough", + " └─Projection 0.00 mpp[tiflash] test.t.i", + " └─Selection 0.00 mpp[tiflash] ge(test.t.m, 1726910326), le(test.t.m, 1726910391), not(in(test.t.a, -1, 0)), or(eq(test.t.w, \"1123\"), eq(test.t.l, \"1123\"))", + " └─TableFullScan 10.00 mpp[tiflash] table:a pushed down filter:eq(test.t.s, 0), keep order:false, stats:pseudo")) +} diff --git a/planner/core/casetest/testdata/plan_suite_out.json b/planner/core/casetest/testdata/plan_suite_out.json index 214a24a6fb1c0..16e2507456f5b 100644 --- a/planner/core/casetest/testdata/plan_suite_out.json +++ b/planner/core/casetest/testdata/plan_suite_out.json @@ -406,11 +406,11 @@ "└─ExchangeSender 4439.11 mpp[tiflash] ExchangeType: PassThrough", " └─Projection 4439.11 mpp[tiflash] test.t.a, Column#5", " └─Projection 4439.11 mpp[tiflash] Column#5, test.t.a", - " └─HashAgg 4439.11 mpp[tiflash] group by:test.t.a, test.t.c, funcs:sum(Column#16)->Column#5, funcs:firstrow(test.t.a)->test.t.a", + " └─HashAgg 4439.11 mpp[tiflash] group by:test.t.a, test.t.c, funcs:sum(Column#10)->Column#5, funcs:firstrow(test.t.a)->test.t.a", " └─ExchangeReceiver 4439.11 mpp[tiflash] ", " └─ExchangeSender 4439.11 mpp[tiflash] ExchangeType: HashPartition, Compression: FAST, Hash Cols: [name: test.t.a, collate: binary], [name: test.t.c, collate: binary]", - " └─HashAgg 4439.11 mpp[tiflash] group by:Column#19, Column#20, funcs:sum(Column#18)->Column#16", - " └─Projection 5548.89 mpp[tiflash] cast(test.t.b, decimal(10,0) BINARY)->Column#18, test.t.a, test.t.c", + " └─HashAgg 4439.11 mpp[tiflash] group by:Column#13, Column#14, funcs:sum(Column#12)->Column#10", + " └─Projection 5548.89 mpp[tiflash] cast(test.t.b, decimal(10,0) BINARY)->Column#12, test.t.a, test.t.c", " └─Selection 5548.89 mpp[tiflash] or(lt(test.t.b, 2), gt(test.t.a, 2))", " └─TableFullScan 10000.00 mpp[tiflash] table:t pushed down filter:empty, keep order:false, stats:pseudo" ], diff --git a/planner/core/find_best_task.go b/planner/core/find_best_task.go index 673d1463e5374..7430318b6e772 100644 --- a/planner/core/find_best_task.go +++ b/planner/core/find_best_task.go @@ -963,6 +963,10 @@ func (ds *DataSource) findBestTask(prop *property.PhysicalProperty, planCounter for _, candidate := range candidates { path := candidate.path if path.PartialIndexPaths != nil { + // When TiFlash is preferred, skip index-merge paths whose store type is TiKV. + if ds.preferStoreType&preferTiFlash != 0 && path.StoreType == kv.TiKV { + continue + } idxMergeTask, err := ds.convertToIndexMergeScan(prop, candidate, opt) if err != nil { return nil, 0, err