[enhancement](Nereids) support reuse sql cache between different comment (#40065)

cherry pick from #40049
924060929 authored Aug 28, 2024
1 parent c9aca7d commit 688b97e
Showing 5 changed files with 89 additions and 12 deletions.
@@ -38,6 +38,7 @@
import org.apache.doris.nereids.SqlCacheContext.ScanTable;
import org.apache.doris.nereids.StatementContext;
import org.apache.doris.nereids.analyzer.UnboundVariable;
import org.apache.doris.nereids.parser.NereidsParser;
import org.apache.doris.nereids.properties.PhysicalProperties;
import org.apache.doris.nereids.rules.analysis.ExpressionAnalyzer;
import org.apache.doris.nereids.rules.analysis.UserAuthentication;
@@ -127,7 +128,7 @@ public void tryAddFeSqlCache(ConnectContext connectContext, String sql) {
SqlCacheContext sqlCacheContext = sqlCacheContextOpt.get();
UserIdentity currentUserIdentity = connectContext.getCurrentUserIdentity();
String key = sqlCacheContext.getCacheKeyType() == CacheKeyType.SQL
- ? currentUserIdentity.toString() + ":" + sql.trim()
+ ? currentUserIdentity.toString() + ":" + normalizeSql(sql.trim())
: currentUserIdentity.toString() + ":" + DebugUtil.printId(sqlCacheContext.getOrComputeCacheKeyMd5());
if (sqlCaches.getIfPresent(key) == null && sqlCacheContext.getOrComputeCacheKeyMd5() != null
&& sqlCacheContext.getResultSetInFe().isPresent()) {
@@ -147,7 +148,7 @@ public void tryAddBeCache(ConnectContext connectContext, String sql, CacheAnalyz
SqlCacheContext sqlCacheContext = sqlCacheContextOpt.get();
UserIdentity currentUserIdentity = connectContext.getCurrentUserIdentity();
String key = sqlCacheContext.getCacheKeyType() == CacheKeyType.SQL
- ? currentUserIdentity.toString() + ":" + sql.trim()
+ ? currentUserIdentity.toString() + ":" + normalizeSql(sql.trim())
: currentUserIdentity.toString() + ":" + DebugUtil.printId(sqlCacheContext.getOrComputeCacheKeyMd5());
if (sqlCaches.getIfPresent(key) == null && sqlCacheContext.getOrComputeCacheKeyMd5() != null) {
SqlCache cache = (SqlCache) analyzer.getCache();
@@ -168,7 +169,7 @@ public void tryAddBeCache(ConnectContext connectContext, String sql, CacheAnalyz
/** tryParseSql */
public Optional<LogicalSqlCache> tryParseSql(ConnectContext connectContext, String sql) {
UserIdentity currentUserIdentity = connectContext.getCurrentUserIdentity();
- String key = currentUserIdentity + ":" + sql.trim()
+ String key = currentUserIdentity + ":" + normalizeSql(sql.trim())
SqlCacheContext sqlCacheContext = sqlCaches.getIfPresent(key);
if (sqlCacheContext == null) {
return Optional.empty();
@@ -201,6 +202,10 @@ public Optional<LogicalSqlCache> tryParseSql(ConnectContext connectContext, Stri
}
}

private String normalizeSql(String sql) {
return NereidsParser.removeCommentAndTrimBlank(sql);
}

private Optional<LogicalSqlCache> tryParseSqlWithoutCheckVariable(
ConnectContext connectContext, String key,
SqlCacheContext sqlCacheContext, UserIdentity currentUserIdentity) {
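Note on the change above: the FE-side cache key becomes "<current user identity>:<normalized sql>" instead of "<current user identity>:<raw trimmed sql>", so statements that differ only in comments or whitespace map to the same entry. A minimal sketch of that key scheme, using a hypothetical regex-based normalizer as a stand-in (the commit itself normalizes through the DorisLexer token stream, added in the next file):

import java.util.regex.Pattern;

public class CacheKeySketch {
    // strip block comments that are not hints (/*+ ... */) and -- line comments
    private static final Pattern COMMENT =
            Pattern.compile("/\\*(?!\\+).*?\\*/|--[^\\n]*", Pattern.DOTALL);
    private static final Pattern BLANK = Pattern.compile("\\s+");

    // hypothetical stand-in for the normalizeSql(...) helper added above
    static String normalizeSql(String sql) {
        String noComments = COMMENT.matcher(sql).replaceAll(" ");
        return BLANK.matcher(noComments).replaceAll(" ").trim();
    }

    // key layout used by tryAddFeSqlCache/tryAddBeCache/tryParseSql: "<user identity>:<normalized sql>"
    static String cacheKey(String userIdentity, String sql) {
        return userIdentity + ":" + normalizeSql(sql.trim());
    }

    public static void main(String[] args) {
        String k1 = cacheKey("root@%", "select /* trace_id: 1 */ * from t");
        String k2 = cacheKey("root@%", "select  /* trace_id: 2 */ *\n from t");
        System.out.println(k1.equals(k2)); // true: the statements differ only in comments and whitespace
    }
}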
@@ -40,6 +40,7 @@
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.Recognizer;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenSource;
import org.antlr.v4.runtime.atn.PredictionMode;
@@ -334,4 +335,42 @@ public static ParserRuleContext toAst(String sql, Function<DorisParser, ParserRu
}
return tree;
}

/**
* removeCommentAndTrimBlank
*
* for example: select \/*+SET_VAR(key=value)*\/ \/* trace_id: 1234 *\/ *, a, \n b from table
*
* will be normalized to: select \/*+SET_VAR(key=value)*\/ * , a, b from table
*/
public static String removeCommentAndTrimBlank(String sql) {
DorisLexer lexer = new DorisLexer(new CaseInsensitiveStream(CharStreams.fromString(sql)));
CommonTokenStream tokenStream = new CommonTokenStream(lexer);
tokenStream.fill();

// the normalized sql can be slightly longer than the input because a space is appended after each kept token
StringBuilder newSql = new StringBuilder((int) (sql.length() * 1.2));

for (Token token : tokenStream.getTokens()) {
int tokenType = token.getType();
switch (tokenType) {
case DorisLexer.SIMPLE_COMMENT:
case DorisLexer.WS:
case Recognizer.EOF:
break;
case DorisLexer.BRACKETED_COMMENT:
String bracketedComment = token.getText();
// append hint
if (bracketedComment.startsWith("/*+")) {
newSql.append(bracketedComment);
newSql.append(" ");
}
break;
default:
newSql.append(token.getText());
newSql.append(" ");
}
}
return newSql.toString().trim();
}
}
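A usage sketch of the new helper, assuming the Doris FE classes from this commit are on the classpath; two statements that differ only in an ordinary comment should normalize to the same string, while the /*+ ... */ hint survives:

import org.apache.doris.nereids.parser.NereidsParser;

public class RemoveCommentDemo {
    public static void main(String[] args) {
        String a = "select /*+SET_VAR(key=value)*/ /* trace_id: 1 */ * from t";
        String b = "select   /*+SET_VAR(key=value)*/ /* trace_id: 2 */ *  from t";
        String na = NereidsParser.removeCommentAndTrimBlank(a);
        String nb = NereidsParser.removeCommentAndTrimBlank(b);
        System.out.println(na);            // hint kept, trace_id comment and extra blanks dropped
        System.out.println(na.equals(nb)); // expected: true
    }
}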
@@ -17,15 +17,15 @@

package org.apache.doris.regression.util

import com.google.common.collect.Sets

class LoggerUtils {
static Tuple2<Integer, String> getErrorInfo(Throwable t, File file) {
if (file.name.endsWith(".groovy")) {
+ def st = findRootErrorStackTrace(t, Sets.newLinkedHashSet(), file)
int lineNumber = -1
- for (def st : t.getStackTrace()) {
- if (Objects.equals(st.fileName, file.name)) {
- lineNumber = st.getLineNumber()
- break
- }
+ if (!st.is(null)) {
+ lineNumber = st.getLineNumber()
}
if (lineNumber == -1) {
return new Tuple2<Integer, String>(null, null)
@@ -40,4 +40,23 @@ class LoggerUtils {
return new Tuple2<Integer, String>(null, null)
}
}

static StackTraceElement findRootErrorStackTrace(Throwable t, Set<Throwable> throwables, File file) {
throwables.add(t)

def cause = t.getCause()
if (!cause.is(null) && !throwables.contains(cause)) {
def foundStackTrace = findRootErrorStackTrace(cause, throwables, file)
if (!foundStackTrace.is(null)) {
return foundStackTrace
}
}

for (def st : t.getStackTrace()) {
if (Objects.equals(st.fileName, file.name)) {
return st
}
}
return null
}
}
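The Sets.newLinkedHashSet() visited set above guards findRootErrorStackTrace against cyclic cause chains, which Throwable permits. A self-contained illustration of the same guard, with hypothetical names rather than project code:

import java.util.LinkedHashSet;
import java.util.Set;

public class RootCauseDemo {
    // walk the cause chain, stopping at the end or at an already-visited throwable
    static Throwable rootCause(Throwable t, Set<Throwable> seen) {
        seen.add(t);
        Throwable cause = t.getCause();
        if (cause == null || seen.contains(cause)) {
            return t;
        }
        return rootCause(cause, seen);
    }

    public static void main(String[] args) {
        Exception a = new Exception("a");
        Exception b = new Exception("b", a);
        a.initCause(b); // build a cyclic cause chain: b -> a -> b
        System.out.println(rootCause(b, new LinkedHashSet<>()).getMessage()); // prints "a", no infinite loop
    }
}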
regression-test/plugins/test_helper.groovy (2 additions, 0 deletions)
@@ -57,6 +57,8 @@ Suite.metaClass.createTestTable = { String tableName, boolean uniqueTable = fals
(4, 1), (4, 2),
(5, 1), (5, 2)
"""

sql "sync"
}


@@ -674,10 +674,7 @@ suite("parse_sql_from_sql_cache") {
def result2 = sql "select * from (select $randomInt as id)a"
assertTrue(result2.size() == 1)

- assertNoCache "select * from test_use_plan_cache20 limit 0"
- def result3 = sql "select * from test_use_plan_cache20 limit 0"
- assertTrue(result3.isEmpty())
-
+ sql "select * from test_use_plan_cache20 limit 0"
assertHasCache "select * from test_use_plan_cache20 limit 0"
def result4 = sql "select * from test_use_plan_cache20 limit 0"
assertTrue(result4.isEmpty())
@@ -723,6 +720,21 @@
assertNoCache "select * from test_use_plan_cache21"
def result2 = sql "select * from test_use_plan_cache21"
assertTrue(result2.size() == 1)
}),
extraThread("remove_comment", {
createTestTable "test_use_plan_cache22"

// the sql cache can only be used 10s after the last partition change, so wait it out
sleep(10000)

sql "set enable_nereids_planner=true"
sql "set enable_fallback_to_original_planner=false"
sql "set enable_sql_cache=true"

assertNoCache "select /*+SET_VAR(disable_nereids_rules='')*/ /*comment2*/ * from test_use_plan_cache22 order by 1, 2"
sql "select /*+SET_VAR(disable_nereids_rules='')*/ /*comment1*/ * from test_use_plan_cache22 order by 1, 2"

assertHasCache "select /*+SET_VAR(disable_nereids_rules='')*/ /*comment2*/ * from test_use_plan_cache22 order by 1, 2"
})
).get()
}
