Skip to content

Commit

Permalink
Sync files for updater version 0.266.0
Browse files Browse the repository at this point in the history
Follow up to #1235
  • Loading branch information
mburumaxwell committed Jul 24, 2024
1 parent cb4564a commit c517e9e
Show file tree
Hide file tree
Showing 7 changed files with 116 additions and 208 deletions.
23 changes: 13 additions & 10 deletions updater/lib/dependabot/dependency_change.rb
Original file line number Diff line number Diff line change
Expand Up @@ -165,24 +165,32 @@ def matches_existing_pr?
if grouped_update?
# We only want PRs for the same group that have the same versions
job.existing_group_pull_requests.any? do |pr|
directories_in_use = pr["dependencies"].all? { |dep| dep["directory"] }

pr["dependency-group-name"] == dependency_group&.name &&
Set.new(pr["dependencies"]) == updated_dependencies_set
Set.new(pr["dependencies"]) == updated_dependencies_set(should_consider_directory: directories_in_use)
end
else
job.existing_pull_requests.any? { |pr| Set.new(pr) == updated_dependencies_set }
job.existing_pull_requests.any? do |pr|
directories_in_use = pr.all? { |dep| dep["directory"] }

Set.new(pr) == updated_dependencies_set(should_consider_directory: directories_in_use)
end
end
end

private

sig { returns(T::Set[T::Hash[String, T.any(String, T::Boolean)]]) }
def updated_dependencies_set
# Older PRs will not have a directory key, in that case do not consider directory in the comparison. This will
# allow rebases to continue working for those, but for multi-directory configs we do compare with the directory.
sig { params(should_consider_directory: T::Boolean).returns(T::Set[T::Hash[String, T.any(String, T::Boolean)]]) }
def updated_dependencies_set(should_consider_directory:)
Set.new(
updated_dependencies.map do |dep|
{
"dependency-name" => dep.name,
"dependency-version" => dep.version,
"directory" => should_consider_directory? ? dep.directory : nil,
"directory" => should_consider_directory ? dep.directory : nil,
"dependency-removed" => dep.removed? ? true : nil
}.compact
end
Expand All @@ -202,10 +210,5 @@ def directory

T.must(updated_dependency_files.first).directory
end

sig { returns(T::Boolean) }
def should_consider_directory?
grouped_update? && Dependabot::Experiments.enabled?("dependency_has_directory")
end
end
end
2 changes: 1 addition & 1 deletion updater/lib/dependabot/dependency_snapshot.rb
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ def self.create_from_job_definition(job:, job_definition:)
file
end

if Dependabot::Experiments.enabled?(:globs) && job.source.directories
if job.source.directories
# The job.source.directory may contain globs, so we use the directories from the fetched files
job.source.directories = decoded_dependency_files.flat_map(&:directory).uniq
end
Expand Down
63 changes: 48 additions & 15 deletions updater/lib/dependabot/environment.rb
Original file line number Diff line number Diff line change
@@ -1,64 +1,97 @@
# typed: true
# typed: strict
# frozen_string_literal: true

require "sorbet-runtime"

module Dependabot
module Environment
extend T::Sig
extend T::Generic

sig { returns(String) }
def self.job_id
@job_id ||= environment_variable("DEPENDABOT_JOB_ID")
@job_id ||= T.let(environment_variable("DEPENDABOT_JOB_ID"), T.nilable(String))
end

sig { returns(String) }
def self.job_token
@job_token ||= environment_variable("DEPENDABOT_JOB_TOKEN")
@job_token ||= T.let(environment_variable("DEPENDABOT_JOB_TOKEN"), T.nilable(String))
end

sig { returns(T::Boolean) }
def self.debug_enabled?
@debug_enabled ||= job_debug_enabled? || environment_debug_enabled?
@debug_enabled ||= T.let(job_debug_enabled? || environment_debug_enabled?, T.nilable(T::Boolean))
end

sig { returns(Symbol) }
def self.log_level
debug_enabled? ? :debug : :info
end

sig { returns(String) }
def self.api_url
@api_url ||= environment_variable("DEPENDABOT_API_URL", "http://localhost:3001")
@api_url ||= T.let(environment_variable("DEPENDABOT_API_URL", "http://localhost:3001"), T.nilable(String))
end

sig { returns(String) }
def self.job_path
@job_path ||= environment_variable("DEPENDABOT_JOB_PATH")
@job_path ||= T.let(environment_variable("DEPENDABOT_JOB_PATH"), T.nilable(String))
end

sig { returns(String) }
def self.output_path
@output_path ||= environment_variable("DEPENDABOT_OUTPUT_PATH")
@output_path ||= T.let(environment_variable("DEPENDABOT_OUTPUT_PATH"), T.nilable(String))
end

sig { returns(T.nilable(String)) }
def self.repo_contents_path
@repo_contents_path ||= environment_variable("DEPENDABOT_REPO_CONTENTS_PATH", nil)
@repo_contents_path ||= T.let(environment_variable("DEPENDABOT_REPO_CONTENTS_PATH", nil), T.nilable(String))
end

sig { returns(T::Boolean) }
def self.github_actions?
@github_actions ||= environment_variable("GITHUB_ACTIONS", false)
b = T.cast(environment_variable("GITHUB_ACTIONS", false), T::Boolean)
@github_actions ||= T.let(b, T.nilable(T::Boolean))
end

sig { returns(T::Boolean) }
def self.deterministic_updates?
@deterministic_updates ||= environment_variable("UPDATER_DETERMINISTIC", false)
b = T.cast(environment_variable("UPDATER_DETERMINISTIC", false), T::Boolean)
@deterministic_updates ||= T.let(b, T.nilable(T::Boolean))
end

sig { returns(T::Hash[String, T.untyped]) }
def self.job_definition
@job_definition ||= JSON.parse(File.read(job_path))
@job_definition ||= T.let(JSON.parse(File.read(job_path)), T.nilable(T::Hash[String, T.untyped]))
end

sig do
type_parameters(:T)
.params(variable_name: String, default: T.any(Symbol, T.type_parameter(:T)))
.returns(T.any(String, T.type_parameter(:T)))
end
private_class_method def self.environment_variable(variable_name, default = :_undefined)
return ENV.fetch(variable_name, default) unless default == :_undefined

ENV.fetch(variable_name) do
raise ArgumentError, "Missing environment variable #{variable_name}"
case default
when :_undefined
ENV.fetch(variable_name) do
raise ArgumentError, "Missing environment variable #{variable_name}"
end
else
val = ENV.fetch(variable_name, T.cast(default, T.type_parameter(:T)))
case val
when String
val = T.must(val.casecmp("true")).zero? if [true, false].include? default
end
T.cast(val, T.type_parameter(:T))
end
end

sig { returns(T::Boolean) }
private_class_method def self.job_debug_enabled?
!!job_definition.dig("job", "debug")
end

sig { returns(T::Boolean) }
private_class_method def self.environment_debug_enabled?
!!environment_variable("DEPENDABOT_DEBUG", false)
end
Expand Down
15 changes: 2 additions & 13 deletions updater/lib/dependabot/file_fetcher_command.rb
Original file line number Diff line number Diff line change
Expand Up @@ -100,19 +100,8 @@ def file_fetcher_for_directory(directory)
end

def dependency_files_for_multi_directories
if Dependabot::Experiments.enabled?(:globs)
return @dependency_files_for_multi_directories ||= dependency_files_for_globs
end

@dependency_files_for_multi_directories ||= job.source.directories.flat_map do |dir|
ff = with_retries { file_fetcher_for_directory(dir) }
files = ff.files
post_ecosystem_versions(ff) if should_record_ecosystem_versions?
files
end
end
return @dependency_files_for_multi_directories if defined?(@dependency_files_for_multi_directories)

def dependency_files_for_globs
has_glob = T.let(false, T::Boolean)
directories = Dir.chdir(job.repo_contents_path) do
job.source.directories.map do |dir|
Expand All @@ -124,7 +113,7 @@ def dependency_files_for_globs
end.flatten
end.uniq

directories.flat_map do |dir|
@dependency_files_for_multi_directories = directories.flat_map do |dir|
ff = with_retries { file_fetcher_for_directory(dir) }

begin
Expand Down
51 changes: 6 additions & 45 deletions updater/lib/dependabot/updater/dependency_group_change_batch.rb
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ def initialize(initial_dependency_files:)
@updated_dependencies = []

@dependency_file_batch = initial_dependency_files.each_with_object({}) do |file, hsh|
hsh[file.path] = { file: file, updated_dependencies: [], changed: false, changes: 0 }
hsh[file.path] = { file: file, changed: false, changes: 0 }
end

@vendored_dependency_batch = {}
Expand Down Expand Up @@ -45,18 +45,8 @@ def updated_dependency_files
end

def merge(dependency_change)
# FIXME: we shouldn't have to rely on this but because CreateGroupUpdatePullRequest explicitly checks
# the DependencyChange.updated_dependencies, we need to add the updated dependencies to the global list
merge_dependency_changes(dependency_change.updated_dependencies)

if Dependabot::Experiments.enabled?(:dependency_has_directory)
merge_file_and_dependency_changes(
dependency_change.updated_dependencies,
dependency_change.updated_dependency_files
)
else
merge_file_changes(dependency_change.updated_dependency_files)
end
merge_file_changes(dependency_change.updated_dependency_files)

Dependabot.logger.debug("Dependencies updated:")
debug_updated_dependencies
Expand All @@ -72,7 +62,9 @@ def add_updated_dependency(dependency)

private

# We should retain a list of all dependencies that we change.
# We should retain a list of all dependencies that we change, in future we may need to account for the folder
# in which these changes are made to permit cross-folder updates of the same dependency.
#
# This list may contain duplicates if we make iterative updates to a Dependency within a single group, but
# rather than re-write the Dependency objects to account for the changes from the lowest previous version
# to the final version, we should defer it to the Dependabot::PullRequestCreator::MessageBuilder as a
Expand Down Expand Up @@ -100,38 +92,7 @@ def merge_file_to_batch(file, batch)
0
end

batch[file.path] = {
file: file,
updated_dependencies: batch.dig(file.path, :updated_dependencies) || [],
changed: true,
changes: change_count + 1
}
end

def merge_file_and_dependency_changes(updated_dependencies, updated_dependency_files)
updated_dependency_files.each do |updated_file|
if updated_file.vendored_file?
merge_file_and_dependency_changes_to_batch(updated_file, @vendored_dependency_batch, updated_dependencies)
else
merge_file_and_dependency_changes_to_batch(updated_file, @dependency_file_batch, updated_dependencies)
end
end
end

def merge_file_and_dependency_changes_to_batch(file, batch, updated_dependencies)
change_count = if (existing_file = batch[file.path])
existing_file.fetch(:change_count, 0)
else
# The file is newly encountered
Dependabot.logger.debug("File #{file.operation}d: '#{file.path}'")
0
end

previous_updated_dependencies = batch.dig(file.path, :updated_dependencies) || []
updated_dependencies_list = previous_updated_dependencies.concat(updated_dependencies)

batch[file.path] =
{ file: file, updated_dependencies: updated_dependencies_list, changed: true, changes: change_count + 1 }
batch[file.path] = { file: file, changed: true, changes: change_count + 1 }
end

def debug_updated_dependencies
Expand Down
Loading

0 comments on commit c517e9e

Please sign in to comment.