Skip to content

Commit

Permalink
Move AI autofix logic into transformers so suggestions can be cached (#1519)
Browse files Browse the repository at this point in the history

My original attempt at this implementation was manually caching the
autofix() step, but it seemed very messy (that impl is not provided in
this PR).

This approach feels a lot cleaner because it utilizes IssueTransformer
to mutate the issue. We can also hook directly into progress to provide
a much more understandable inline UX.
  • Loading branch information
lsegal authored Feb 21, 2025
1 parent 41fa06f commit 88c5384
Show file tree
Hide file tree
Showing 6 changed files with 95 additions and 223 deletions.
6 changes: 3 additions & 3 deletions qlty-check/src/cache.rs
Original file line number Diff line number Diff line change
Expand Up @@ -275,15 +275,15 @@ impl IssuesCacheKey {
cache_busters.insert(path, contents);
}

let mut repository_sha: Option<String> = None;
let mut repository_tree_sha: Option<String> = None;
let mut dirty_paths = Vec::new();
if let Ok(repository) = Workspace::new().and_then(|w| w.repo()) {
repository_sha = Self::repository_sha(&repository).ok();
repository_tree_sha = Self::repository_sha(&repository).ok();
dirty_paths = Self::collect_dirty_paths(&repository);
}

Self {
repository_tree_sha: repository_sha,
repository_tree_sha,
dirty_paths,
digest: InvocationCacheKey {
qlty_version: QLTY_VERSION.to_string(),
Expand Down
19 changes: 13 additions & 6 deletions qlty-check/src/executor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ mod invocation_script;
pub mod staging_area;

use self::staging_area::{load_config_file_from_qlty_dir, load_config_file_from_repository};
use crate::llm::Fixer;
use crate::planner::check_filters::CheckFilters;
use crate::planner::config_files::config_globset;
use crate::planner::source_extractor::SourceExtractor;
Expand Down Expand Up @@ -136,6 +137,10 @@ impl Executor {
staging_area: self.plan.staging_area.clone(),
}));

if self.plan.settings.ai {
transformers.push(Box::new(Fixer::new(&self.plan, self.progress.clone())));
}

if !self.plan.invocations.is_empty() {
let loaded_config_files = self.stage_workspace_entries()?;
invocations = self.run_invocations(&transformers)?;
Expand Down Expand Up @@ -623,10 +628,10 @@ fn run_invocation(
) -> Result<InvocationResult> {
let task = progress.task(&plan.plugin_name, &plan.description());
let mut result = plan.driver.run(&plan, &task)?;
let mut issue_limit_reached = HashSet::<PathBuf>::new();
let issue_limit_reached = Arc::new(Mutex::new(HashSet::<PathBuf>::new()));

if let Some(file_results) = result.file_results.as_mut() {
for file_result in file_results {
file_results.par_iter_mut().for_each(|file_result| {
if file_result.issues.len() >= MAX_ISSUES_PER_FILE {
warn!(
"{} on {:?} produced too many results ({} > {}), dropping all issues from file.",
Expand All @@ -635,15 +640,16 @@ fn run_invocation(
file_result.issues.len(),
MAX_ISSUES_PER_FILE
);
issue_limit_reached.insert(PathBuf::from(&file_result.path));
issue_limit_reached.lock().unwrap().insert(PathBuf::from(&file_result.path));
file_result.issues.truncate(MAX_ISSUES_PER_FILE);
file_result.issues.shrink_to_fit();
continue;
return;
}

file_result.issues = file_result
.issues
.drain(..)
.par_iter()
.cloned()
.filter_map(|mut issue| {
for transformer in transformers {
if let Some(transformed_issue) = transformer.transform(issue) {
Expand All @@ -655,7 +661,7 @@ fn run_invocation(
Some(issue)
})
.collect();
}
});
}

if plan.driver.cache_results {
Expand All @@ -665,6 +671,7 @@ fn run_invocation(
progress.increment(plan.workspace_entries.len() as u64);
task.clear();

let issue_limit_reached = issue_limit_reached.lock().unwrap();
if !issue_limit_reached.is_empty() {
result.push_message(
MessageLevel::Error,
Expand Down
Loading

0 comments on commit 88c5384

Please sign in to comment.