[regression-test](fix) fix some fail filecache cases (#47785)
pingchunzhang authored and Your Name committed Feb 12, 2025
1 parent 8986571 commit d73e94d
Showing 6 changed files with 146 additions and 13 deletions.
116 changes: 116 additions & 0 deletions regression-test/plugins/cloud_filecache_plugin.groovy
@@ -0,0 +1,116 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
import groovy.json.JsonOutput
import org.apache.doris.regression.suite.Suite
import org.codehaus.groovy.runtime.IOGroovyMethods



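// Emulates `SHOW CACHE HOTSPOT "/<compute group>/<table>"` by querying
// __internal_schema.cloud_cache_hotspot directly; which of the three queries
// below runs depends on which arguments the caller supplies.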
Suite.metaClass.show_cache_hotspot = { String computeGroup = null, String table = null ->
//show cache hotspot "/<compute group>/<table>"
def select_table_hotspot = """
select
partition_id as PartitionId,
partition_name as PartitionName
from __internal_schema.cloud_cache_hotspot
where
cluster_name = "${computeGroup}"
and table_name = "${table}"
group by
cluster_id,
cluster_name,
table_id,
table_name,
partition_id,
partition_name;
"""
//show cache hotspot "/<compute group>"
def select_compute_group_hotspot = """
with t1 as (
select
cluster_id,
cluster_name,
table_id,
table_name,
insert_day,
sum(query_per_day) as query_per_day_total,
sum(query_per_week) as query_per_week_total
from __internal_schema.cloud_cache_hotspot
where cluster_name = "${computeGroup}"
group by cluster_id, cluster_name, table_id, table_name, insert_day
)
select
cluster_id as ComputeGroupId,
cluster_name as ComputeGroupName,
table_id as TableId,
table_name as TableName
from (
select
row_number() over (
partition by cluster_id
order by insert_day desc, query_per_day_total desc, query_per_week_total desc
) as dr2,
*
from t1
) t2
where dr2 = 1;
"""
//show cache hotspot "/"
def select_all_hotspot = """
with t1 as (
select
cluster_id,
cluster_name,
table_id,
table_name,
insert_day,
sum(query_per_day) as query_per_day_total,
sum(query_per_week) as query_per_week_total
from __internal_schema.cloud_cache_hotspot
group by cluster_id, cluster_name, table_id, table_name, insert_day
)
select
cluster_id as ComputeGroupId,
cluster_name as ComputeGroupName,
table_id as TableId,
table_name as TableName
from (
select
row_number() over (
partition by cluster_id
order by insert_day desc, query_per_day_total desc, query_per_week_total desc
) as dr2,
*
from t1
) t2
where dr2 = 1;
"""
    def res = null
    if (computeGroup != null && table != null) {
        res = sql_return_maparray """${select_table_hotspot}"""
    } else if (computeGroup != null && table == null) {
        res = sql_return_maparray """${select_compute_group_hotspot}"""
    } else if (computeGroup == null && table == null) {
        res = sql_return_maparray """${select_all_hotspot}"""
    }
    return res
}
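
For reference, a minimal usage sketch of the new helper from inside a suite; it relies on sql_return_maparray returning a list of maps keyed by the column aliases above, and it lets suites call one helper instead of issuing SHOW CACHE HOTSPOT statements directly. The suite, compute-group, and table names below are placeholders, not part of this commit:

suite("docs_show_cache_hotspot_example") {
    // equivalent to SHOW CACHE HOTSPOT "/": top table per compute group across all groups
    def all = show_cache_hotspot()
    log.info(all.toString())

    // equivalent to SHOW CACHE HOTSPOT "/<compute group>": top table for one compute group
    def byGroup = show_cache_hotspot("example_compute_group")
    log.info(byGroup.toString())

    // equivalent to SHOW CACHE HOTSPOT "/<compute group>/<table>": hot partitions of one table
    def byTable = show_cache_hotspot("example_compute_group", "example_db.example_table")
    byTable.each { row -> log.info("partition ${row.PartitionId}: ${row.PartitionName}") }
}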
Expand Up @@ -103,7 +103,7 @@ suite("test_warmup_show_stmt") {
}
}
sleep(40000)
// result = sql """ show cache hotspot "/" """
// result = show_cache_hotspot()
// if (result[0][0].equals("regression_cluster_id0")) {
// assertEquals(result[0][3], "regression_test_cloud_p0_cache_multi_cluster_warm_up_hotspot.customer")
// assertEquals(result[1][3], "regression_test_cloud_p0_cache_multi_cluster_warm_up_hotspot.supplier")
Expand All @@ -112,15 +112,15 @@ suite("test_warmup_show_stmt") {
// assertEquals(result[0][3], "regression_test_cloud_p0_cache_multi_cluster_warm_up_hotspot.supplier")
// }

// try {
// sql """ show cache hotspot "/error_cluster """
// try {
// show_cache_hotspot("error_cluster")
// assertTrue(false)
// } catch (Exception e) {
// assertTrue(true)
// }

// try {
// sql """ show cache hotspot "/regression_cluster_name1/regression_test_cloud_p0_cache_multi_cluster_warm_up_hotspot.error_table """
// show_cache_hotspot("regression_cluster_name1", "regression_test_cloud_p0_cache_multi_cluster_warm_up_hotspot.error_table")
// assertTrue(false)
// } catch (Exception e) {
// assertTrue(true)
Expand Up @@ -80,7 +80,7 @@ suite("test_warmup_show_stmt_2") {
return s.getFileName() + ":" + s.getLineNumber()
}

def result = sql_return_maparray """ show cache hotspot "/" """
def result = show_cache_hotspot()
log.info(result.toString())
org.junit.Assert.assertTrue("result.size() " + result.size() + " > 0", result.size() > 0)
def hotTableName = "regression_test_cloud_p0_cache_multi_cluster_warm_up_hotspot.customer"
Expand All @@ -96,21 +96,21 @@ suite("test_warmup_show_stmt_2") {
}
org.junit.Assert.assertTrue(getLineNumber() + "cannot find expected cache hotspot ${hotTableName}", found)

result = sql_return_maparray """ show cache hotspot "/regression_cluster_name0" """
result = show_cache_hotspot("regression_cluster_name0")
log.info(result.toString())
org.junit.Assert.assertTrue(getLineNumber() + "result.size() " + result.size() + " > 0", result.size() > 0)
assertEquals(result[0].get("PartitionName"), "p3")
assertEquals(result[0].get("TableName"), "regression_test_cloud_p0_cache_multi_cluster_warm_up_hotspot.customer")
// result = sql_return_maparray """ show cache hotspot "/regression_cluster_name1" """
// result = show_cache_hotspot("regression_cluster_name1")
// assertEquals(result.size(), 0);
// not queried table should not be the hotspot
result = sql_return_maparray """ show cache hotspot "/regression_cluster_name0/regression_test_cloud_p0_cache_multi_cluster_warm_up_hotspot.supplier" """
result = show_cache_hotspot("regression_cluster_name0", "regression_test_cloud_p0_cache_multi_cluster_warm_up_hotspot.supplier" )
log.info(result.toString())
assertEquals(result.size(), 0);

sql new File("""${context.file.parent}/../ddl/${table}_delete.sql""").text
sleep(40000)
result = sql_return_maparray """ show cache hotspot "/" """
result = show_cache_hotspot()
log.info(result.toString())
org.junit.Assert.assertTrue("result.size() " + result.size() + " > 0", result.size() > 0)
found = false
Expand Up @@ -78,11 +78,11 @@ suite("test_warmup_show_stmt_3") {
}
for (int i = 0; i < 3; i++) {
sleep(40000)
result = sql """ show cache hotspot "/regression_cluster_name0/regression_test_cloud_p0_cache_multi_cluster_warm_up_hotspot.customer" """
result = show_cache_hotspot("regression_cluster_name0", "regression_test_cloud_p0_cache_multi_cluster_warm_up_hotspot.customer")
assertTrue(result.size() > 0);
}
thread.join()
sleep(40000)
result = sql """ show cache hotspot "/regression_cluster_name0/regression_test_cloud_p0_cache_multi_cluster_warm_up_hotspot.customer" """
result = show_cache_hotspot("regression_cluster_name0", "regression_test_cloud_p0_cache_multi_cluster_warm_up_hotspot.customer")
assertTrue(result.size() > 0);
}
17 changes: 17 additions & 0 deletions regression-test/suites/cloud_p0/cache/ttl/alter_ttl_1.groovy
Expand Up @@ -122,6 +122,7 @@ suite("alter_ttl_1") {
}
sql """ ALTER TABLE customer_ttl SET ("file_cache_ttl_seconds"="140") """
sleep(80000)
// after 110s, the data from the first load should have transitioned to the normal queue
getMetricsMethod.call() {
respCode, body ->
assertEquals("${respCode}".toString(), "200")
Expand All @@ -131,6 +132,15 @@ suite("alter_ttl_1") {
for (String line in strs) {
if (flag1) break;
if (line.contains("ttl_cache_size")) {
if (line.startsWith("#")) {
continue
}
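                        // metric lines look like "ttl_cache_size <value>"; the value follows the first space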
def i = line.indexOf(' ')
assertEquals(line.substring(i).toLong(), 0)

}

if (line.contains("normal_queue_cache_size")) {
if (line.startsWith("#")) {
continue
}
Expand Down Expand Up @@ -158,6 +168,13 @@ suite("alter_ttl_1") {
}
def i = line.indexOf(' ')
assertEquals(line.substring(i).toLong(), 0)
}
if (line.contains("normal_queue_cache_size")) {
if (line.startsWith("#")) {
continue
}
def i = line.indexOf(' ')
assertEquals(line.substring(i).toLong(), ttl_cache_size)
flag1 = true
}
}
4 changes: 2 additions & 2 deletions regression-test/suites/cloud_p0/cache/ttl/alter_ttl_2.groovy
Expand Up @@ -161,8 +161,8 @@ suite("alter_ttl_2") {
}
assertTrue(flag1)
}
// wait for ttl timeout
sleep(40000)
// the TTL of the first load is 300s, so wait another 200s for it to expire
sleep(200000)
getMetricsMethod.call() {
respCode, body ->
assertEquals("${respCode}".toString(), "200")
