diff --git a/src/rust/engine/options/src/env_tests.rs b/src/rust/engine/options/src/env_tests.rs
index 9f8d66581a7..17e61b2e062 100644
--- a/src/rust/engine/options/src/env_tests.rs
+++ b/src/rust/engine/options/src/env_tests.rs
@@ -32,13 +32,10 @@ fn test_display() {
 #[test]
 fn test_scope() {
   let env = env([("PANTS_PYTHON_EXAMPLE", "true")]);
-  assert_eq!(
-    true,
-    env
-      .get_bool(&option_id!(["python"], "example"))
-      .unwrap()
-      .unwrap()
-  );
+  assert!(env
+    .get_bool(&option_id!(["python"], "example"))
+    .unwrap()
+    .unwrap());
 }
 
 #[test]
diff --git a/src/rust/engine/process_execution/docker/src/docker.rs b/src/rust/engine/process_execution/docker/src/docker.rs
index a8f73879da2..17366e0b0d7 100644
--- a/src/rust/engine/process_execution/docker/src/docker.rs
+++ b/src/rust/engine/process_execution/docker/src/docker.rs
@@ -393,7 +393,7 @@ impl<'a> process_execution::CommandRunner for CommandRunner<'a> {
       let exclusive_spawn = prepare_workdir(
         workdir.path().to_owned(),
         &req,
-        req.input_digests.input_files.clone(),
+        req.input_digests.inputs.clone(),
         self.store.clone(),
         self.executor.clone(),
         &named_caches,
diff --git a/src/rust/engine/process_execution/pe_nailgun/src/lib.rs b/src/rust/engine/process_execution/pe_nailgun/src/lib.rs
index 97024889054..a1ac162c7b9 100644
--- a/src/rust/engine/process_execution/pe_nailgun/src/lib.rs
+++ b/src/rust/engine/process_execution/pe_nailgun/src/lib.rs
@@ -209,7 +209,7 @@ impl process_execution::CommandRunner for CommandRunner {
     let exclusive_spawn = prepare_workdir(
       nailgun_process.workdir_path().to_owned(),
       &client_req,
-      client_req.input_digests.input_files.clone(),
+      client_req.input_digests.inputs.clone(),
       self.store.clone(),
       self.executor.clone(),
       &self.named_caches,
diff --git a/src/rust/engine/process_execution/pe_nailgun/src/nailgun_pool.rs b/src/rust/engine/process_execution/pe_nailgun/src/nailgun_pool.rs
index 49124cee1f3..5ab746265fe 100644
--- a/src/rust/engine/process_execution/pe_nailgun/src/nailgun_pool.rs
+++ b/src/rust/engine/process_execution/pe_nailgun/src/nailgun_pool.rs
@@ -368,7 +368,7 @@ impl NailgunProcess {
     prepare_workdir(
       workdir.path().to_owned(),
       &startup_options,
-      startup_options.input_digests.input_files.clone(),
+      startup_options.input_digests.inputs.clone(),
       store.clone(),
       executor.clone(),
       named_caches,
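
// The `prepare_workdir` call sites above (and the matching ones in local.rs and
// intrinsics.rs further down) change only because of the field rename on
// `InputDigests` in process_execution/src/lib.rs; `prepare_workdir` itself keeps its
// signature. A minimal before/after sketch of the access pattern, with `req` standing
// in for whichever request value a given runner holds:
//
//     let digest = req.input_digests.input_files.clone(); // before this change
//     let digest = req.input_digests.inputs.clone();      // after this change
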
diff --git a/src/rust/engine/process_execution/remote/src/remote_tests.rs b/src/rust/engine/process_execution/remote/src/remote_tests.rs
index e212ff85078..ab4d5eb5431 100644
--- a/src/rust/engine/process_execution/remote/src/remote_tests.rs
+++ b/src/rust/engine/process_execution/remote/src/remote_tests.rs
@@ -1,7 +1,6 @@
 // Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
 // Licensed under the Apache License, Version 2.0 (see LICENSE).
 use std::collections::{BTreeMap, BTreeSet, HashSet};
-use std::convert::TryInto;
 use std::path::{Path, PathBuf};
 use std::time::Duration;
 
@@ -909,15 +908,9 @@ async fn successful_with_only_call_to_execute() {
   let mock_server = {
     let EntireExecuteRequest {
       execute_request, ..
-    } = process_execution::make_execute_request(
-      &execute_request.clone().try_into().unwrap(),
-      None,
-      None,
-      &store,
-      None,
-    )
-    .await
-    .unwrap();
+    } = process_execution::make_execute_request(&execute_request, None, None, &store, None)
+      .await
+      .unwrap();
 
     mock::execution_server::TestServer::new(
       mock::execution_server::MockExecution::new(vec![ExpectedAPICall::Execute {
@@ -960,15 +953,9 @@ async fn successful_after_reconnect_with_wait_execution() {
   let mock_server = {
     let EntireExecuteRequest {
       execute_request, ..
-    } = process_execution::make_execute_request(
-      &execute_request.clone().try_into().unwrap(),
-      None,
-      None,
-      &store,
-      None,
-    )
-    .await
-    .unwrap();
+    } = process_execution::make_execute_request(&execute_request, None, None, &store, None)
+      .await
+      .unwrap();
 
     mock::execution_server::TestServer::new(
       mock::execution_server::MockExecution::new(vec![
@@ -1015,15 +1002,9 @@ async fn successful_after_reconnect_from_retryable_error() {
   let mock_server = {
     let EntireExecuteRequest {
       execute_request, ..
-    } = process_execution::make_execute_request(
-      &execute_request.clone().try_into().unwrap(),
-      None,
-      None,
-      &store,
-      None,
-    )
-    .await
-    .unwrap();
+    } = process_execution::make_execute_request(&execute_request, None, None, &store, None)
+      .await
+      .unwrap();
 
     let execute_request_2 = execute_request.clone();
 
@@ -1080,15 +1061,9 @@ async fn creates_executing_workunit() {
   let mock_server = {
     let EntireExecuteRequest {
       execute_request, ..
-    } = process_execution::make_execute_request(
-      &execute_request.clone().try_into().unwrap(),
-      None,
-      None,
-      &store,
-      None,
-    )
-    .await
-    .unwrap();
+    } = process_execution::make_execute_request(&execute_request, None, None, &store, None)
+      .await
+      .unwrap();
 
     mock::execution_server::TestServer::new(
       mock::execution_server::MockExecution::new(vec![ExpectedAPICall::Execute {
@@ -1260,15 +1235,9 @@ async fn server_sending_triggering_timeout_with_deadline_exceeded() {
   let mock_server = {
     let EntireExecuteRequest {
       execute_request, ..
-    } = process_execution::make_execute_request(
-      &execute_request.clone().try_into().unwrap(),
-      None,
-      None,
-      &store,
-      None,
-    )
-    .await
-    .unwrap();
+    } = process_execution::make_execute_request(&execute_request, None, None, &store, None)
+      .await
+      .unwrap();
 
     mock::execution_server::TestServer::new(
       mock::execution_server::MockExecution::new(vec![ExpectedAPICall::Execute {
@@ -1314,15 +1283,9 @@ async fn sends_headers() {
   let mock_server = {
     let EntireExecuteRequest {
       execute_request, ..
-    } = process_execution::make_execute_request(
-      &execute_request.clone().try_into().unwrap(),
-      None,
-      None,
-      &store,
-      None,
-    )
-    .await
-    .unwrap();
+    } = process_execution::make_execute_request(&execute_request, None, None, &store, None)
+      .await
+      .unwrap();
 
     mock::execution_server::TestServer::new(
       mock::execution_server::MockExecution::new(vec![ExpectedAPICall::Execute {
@@ -1516,15 +1479,9 @@ async fn ensure_inline_stdio_is_stored() {
     let EntireExecuteRequest {
       execute_request, ..
-    } = process_execution::make_execute_request(
-      &echo_roland_request().try_into().unwrap(),
-      None,
-      None,
-      &store,
-      None,
-    )
-    .await
-    .unwrap();
+    } = process_execution::make_execute_request(&echo_roland_request(), None, None, &store, None)
+      .await
+      .unwrap();
 
     mock::execution_server::TestServer::new(
       mock::execution_server::MockExecution::new(vec![ExpectedAPICall::Execute {
@@ -1603,7 +1560,7 @@ async fn bad_result_bytes() {
     mock::execution_server::TestServer::new(
       mock::execution_server::MockExecution::new(vec![ExpectedAPICall::Execute {
         execute_request: process_execution::make_execute_request(
-          &execute_request.clone().try_into().unwrap(),
+          &execute_request,
           None,
           None,
           &store,
           None,
@@ -1651,15 +1608,9 @@ async fn initial_response_error() {
     let EntireExecuteRequest {
       execute_request, ..
-    } = process_execution::make_execute_request(
-      &execute_request.clone().try_into().unwrap(),
-      None,
-      None,
-      &store,
-      None,
-    )
-    .await
-    .unwrap();
+    } = process_execution::make_execute_request(&execute_request, None, None, &store, None)
+      .await
+      .unwrap();
 
     mock::execution_server::TestServer::new(
       mock::execution_server::MockExecution::new(vec![ExpectedAPICall::Execute {
@@ -1706,15 +1657,9 @@ async fn initial_response_missing_response_and_error() {
     let EntireExecuteRequest {
       execute_request, ..
-    } = process_execution::make_execute_request(
-      &execute_request.clone().try_into().unwrap(),
-      None,
-      None,
-      &store,
-      None,
-    )
-    .await
-    .unwrap();
+    } = process_execution::make_execute_request(&execute_request, None, None, &store, None)
+      .await
+      .unwrap();
 
     mock::execution_server::TestServer::new(
       mock::execution_server::MockExecution::new(vec![ExpectedAPICall::Execute {
@@ -1752,15 +1697,9 @@ async fn fails_after_retry_limit_exceeded() {
   let mock_server = {
     let EntireExecuteRequest {
       execute_request, ..
-    } = process_execution::make_execute_request(
-      &execute_request.clone().try_into().unwrap(),
-      None,
-      None,
-      &store,
-      None,
-    )
-    .await
-    .unwrap();
+    } = process_execution::make_execute_request(&execute_request, None, None, &store, None)
+      .await
+      .unwrap();
 
     mock::execution_server::TestServer::new(
       mock::execution_server::MockExecution::new(vec![
@@ -1816,15 +1755,9 @@ async fn fails_after_retry_limit_exceeded_with_stream_close() {
     let op_name = "foo-bar".to_owned();
     let EntireExecuteRequest {
       execute_request, ..
-    } = process_execution::make_execute_request(
-      &execute_request.clone().try_into().unwrap(),
-      None,
-      None,
-      &store,
-      None,
-    )
-    .await
-    .unwrap();
+    } = process_execution::make_execute_request(&execute_request, None, None, &store, None)
+      .await
+      .unwrap();
 
     mock::execution_server::TestServer::new(
       mock::execution_server::MockExecution::new(vec![
@@ -1899,15 +1832,9 @@ async fn execute_missing_file_uploads_if_known() {
     let EntireExecuteRequest {
       execute_request, ..
-    } = process_execution::make_execute_request(
-      &cat_roland_request().try_into().unwrap(),
-      None,
-      None,
-      &store,
-      None,
-    )
-    .await
-    .unwrap();
+    } = process_execution::make_execute_request(&cat_roland_request(), None, None, &store, None)
+      .await
+      .unwrap();
 
     mock::execution_server::TestServer::new(
       mock::execution_server::MockExecution::new(vec![
@@ -1922,7 +1849,7 @@ async fn execute_missing_file_uploads_if_known() {
         },
         ExpectedAPICall::Execute {
           execute_request: process_execution::make_execute_request(
-            &cat_roland_request().try_into().unwrap(),
+            &cat_roland_request(),
             None,
             None,
             &store,
diff --git a/src/rust/engine/process_execution/src/lib.rs b/src/rust/engine/process_execution/src/lib.rs
index 4f5237424b8..c53d0d1e2d9 100644
--- a/src/rust/engine/process_execution/src/lib.rs
+++ b/src/rust/engine/process_execution/src/lib.rs
@@ -279,8 +279,7 @@ pub struct InputDigests {
   /// The input files for the process execution, which will be materialized as mutable inputs in a
   /// sandbox for the process.
   ///
-  /// TODO: Rename to `inputs` for symmetry with `immutable_inputs`.
-  pub input_files: DirectoryDigest,
+  pub inputs: DirectoryDigest,
 
   /// Immutable input digests to make available in the input root.
   ///
@@ -306,7 +305,7 @@ impl InputDigests {
   pub async fn new(
     store: &Store,
-    input_files: DirectoryDigest,
+    inputs: DirectoryDigest,
     immutable_inputs: BTreeMap<RelativePath, DirectoryDigest>,
     use_nailgun: BTreeSet<RelativePath>,
   ) -> Result<Self, String> {
@@ -330,14 +329,14 @@ impl InputDigests {
         }
       })
       .collect::<Vec<_>>();
-    complete_digests.push(input_files.clone());
+    complete_digests.push(inputs.clone());
     let (complete, nailgun) =
       try_join!(store.merge(complete_digests), store.merge(nailgun_digests),)?;
 
     Ok(Self {
-      complete: complete,
-      nailgun: nailgun,
-      input_files,
+      complete,
+      nailgun,
+      inputs,
       immutable_inputs,
       use_nailgun,
     })
@@ -370,17 +369,17 @@ impl InputDigests {
       .collect();
     let input_files_digests = from
       .iter()
-      .map(|input_digests| input_digests.input_files.clone())
+      .map(|input_digests| input_digests.inputs.clone())
       .collect();
-    let (complete, nailgun, input_files) = try_join!(
+    let (complete, nailgun, inputs) = try_join!(
       store.merge(complete_digests),
      store.merge(nailgun_digests),
      store.merge(input_files_digests),
    )?;
     Ok(Self {
-      complete: complete,
-      nailgun: nailgun,
-      input_files: input_files,
+      complete,
+      nailgun,
+      inputs,
       immutable_inputs: merged_immutable_inputs,
       use_nailgun: Itertools::concat(
         from
@@ -392,11 +391,11 @@
     })
   }
 
-  pub fn with_input_files(input_files: DirectoryDigest) -> Self {
+  pub fn with_input_files(inputs: DirectoryDigest) -> Self {
     Self {
-      complete: input_files.clone(),
+      complete: inputs.clone(),
       nailgun: EMPTY_DIRECTORY_DIGEST.clone(),
-      input_files,
+      inputs,
       immutable_inputs: BTreeMap::new(),
       use_nailgun: BTreeSet::new(),
     }
@@ -420,7 +419,7 @@ impl InputDigests {
         // TODO: See method doc.
         complete: EMPTY_DIRECTORY_DIGEST.clone(),
         nailgun: EMPTY_DIRECTORY_DIGEST.clone(),
-        input_files: self.input_files.clone(),
+        inputs: self.inputs.clone(),
         immutable_inputs: client,
         use_nailgun: BTreeSet::new(),
       },
@@ -428,7 +427,7 @@
       InputDigests {
         complete: self.nailgun.clone(),
         nailgun: EMPTY_DIRECTORY_DIGEST.clone(),
-        input_files: EMPTY_DIRECTORY_DIGEST.clone(),
+        inputs: EMPTY_DIRECTORY_DIGEST.clone(),
         immutable_inputs: server,
         use_nailgun: BTreeSet::new(),
       },
@@ -441,7 +440,7 @@ impl Default for InputDigests {
     Self {
       complete: EMPTY_DIRECTORY_DIGEST.clone(),
       nailgun: EMPTY_DIRECTORY_DIGEST.clone(),
-      input_files: EMPTY_DIRECTORY_DIGEST.clone(),
+      inputs: EMPTY_DIRECTORY_DIGEST.clone(),
       immutable_inputs: BTreeMap::new(),
       use_nailgun: BTreeSet::new(),
     }
diff --git a/src/rust/engine/process_execution/src/local.rs b/src/rust/engine/process_execution/src/local.rs
index 227b981ce59..f1733e922d4 100644
--- a/src/rust/engine/process_execution/src/local.rs
+++ b/src/rust/engine/process_execution/src/local.rs
@@ -281,7 +281,7 @@ impl super::CommandRunner for CommandRunner {
     let exclusive_spawn = prepare_workdir(
       workdir.path().to_owned(),
       &req,
-      req.input_digests.input_files.clone(),
+      req.input_digests.inputs.clone(),
       self.store.clone(),
       self.executor.clone(),
       &self.named_caches,
diff --git a/src/rust/engine/src/intrinsics.rs b/src/rust/engine/src/intrinsics.rs
index cf267149e56..584e714fa63 100644
--- a/src/rust/engine/src/intrinsics.rs
+++ b/src/rust/engine/src/intrinsics.rs
@@ -561,7 +561,7 @@ fn interactive_process(
     prepare_workdir(
       tempdir.path().to_owned(),
       &process,
-      process.input_digests.input_files.clone(),
+      process.input_digests.inputs.clone(),
       context.core.store(),
       context.core.executor.clone(),
       &context.core.named_caches,
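
// The nodes.rs hunks below mostly follow one pattern: `externs::getattr(...).unwrap()`
// becomes `externs::getattr(...)?`, sometimes with a `map_err` that names the attribute,
// so a missing or mistyped Python attribute surfaces as an error string instead of a
// panic in the engine. An illustrative sketch (the `description` attribute is real;
// pairing it with `map_err` here is only to show the shape of the pattern):
//
//     // before: panics if the attribute lookup fails
//     let description: String = externs::getattr(value, "description").unwrap();
//     // after: propagates a Result, optionally with added context
//     let description: String = externs::getattr(value, "description")
//         .map_err(|e| format!("Failed to get `description` for field: {e}"))?;
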
diff --git a/src/rust/engine/src/nodes.rs b/src/rust/engine/src/nodes.rs
index f6c23f88424..04cba1e4200 100644
--- a/src/rust/engine/src/nodes.rs
+++ b/src/rust/engine/src/nodes.rs
@@ -301,15 +301,14 @@ impl ExecuteProcess {
   ) -> Result<Process, String> {
     let input_digests_fut: Result<_, String> = Python::with_gil(|py| {
       let value = (**value).as_ref(py);
-      let input_files = lift_directory_digest(externs::getattr(value, "input_digest").unwrap())
+      let input_files = lift_directory_digest(externs::getattr(value, "input_digest")?)
         .map_err(|err| format!("Error parsing input_digest {err}"))?;
       let immutable_inputs =
         externs::getattr_from_str_frozendict::<&PyAny>(value, "immutable_input_digests")
           .into_iter()
           .map(|(path, digest)| Ok((RelativePath::new(path)?, lift_directory_digest(digest)?)))
           .collect::<Result<BTreeMap<_, _>, String>>()?;
-      let use_nailgun = externs::getattr::<Vec<String>>(value, "use_nailgun")
-        .unwrap()
+      let use_nailgun = externs::getattr::<Vec<String>>(value, "use_nailgun")?
         .into_iter()
         .map(RelativePath::new)
         .collect::<Result<BTreeSet<_>, _>>()?;
@@ -339,19 +338,17 @@ impl ExecuteProcess {
         .map(RelativePath::new)
         .transpose()?;
 
-      let output_files = externs::getattr::<Vec<String>>(value, "output_files")
-        .unwrap()
+      let output_files = externs::getattr::<Vec<String>>(value, "output_files")?
         .into_iter()
         .map(RelativePath::new)
         .collect::<Result<_, _>>()?;
 
-      let output_directories = externs::getattr::<Vec<String>>(value, "output_directories")
-        .unwrap()
+      let output_directories = externs::getattr::<Vec<String>>(value, "output_directories")?
         .into_iter()
         .map(RelativePath::new)
         .collect::<Result<_, _>>()?;
 
-      let timeout_in_seconds: f64 = externs::getattr(value, "timeout_seconds").unwrap();
+      let timeout_in_seconds: f64 = externs::getattr(value, "timeout_seconds")?;
 
       let timeout = if timeout_in_seconds < 0.0 {
         None
@@ -359,8 +356,10 @@
         Some(Duration::from_millis((timeout_in_seconds * 1000.0) as u64))
       };
 
-      let description: String = externs::getattr(value, "description").unwrap();
-      let py_level = externs::getattr(value, "level").unwrap();
+      let description: String = externs::getattr(value, "description")?;
+
+      let py_level = externs::getattr(value, "level")?;
+
       let level = externs::val_to_log_level(py_level)?;
 
       let append_only_caches =
@@ -377,17 +376,16 @@
         externs::getattr_as_optional_string(value, "execution_slot_variable")
           .map_err(|e| format!("Failed to get `execution_slot_variable` for field: {e}"))?;
 
-      let concurrency_available: usize = externs::getattr(value, "concurrency_available").unwrap();
+      let concurrency_available: usize = externs::getattr(value, "concurrency_available")?;
 
       let cache_scope: ProcessCacheScope = {
-        let cache_scope_enum = externs::getattr(value, "cache_scope").unwrap();
-        externs::getattr::<String>(cache_scope_enum, "name")
-          .unwrap()
-          .try_into()?
+        let cache_scope_enum = externs::getattr(value, "cache_scope")?;
+        externs::getattr::<String>(cache_scope_enum, "name")?.try_into()?
       };
 
       let remote_cache_speculation_delay = std::time::Duration::from_millis(
-        externs::getattr::<i64>(value, "remote_cache_speculation_delay_millis").unwrap() as u64,
+        externs::getattr::<i64>(value, "remote_cache_speculation_delay_millis")
+          .map_err(|e| format!("Failed to get `remote_cache_speculation_delay_millis` for field: {e}"))? as u64,
       );
 
       Ok(Process {
@@ -407,7 +405,7 @@
         concurrency_available,
         cache_scope,
         execution_strategy: process_config.execution_strategy,
-        remote_cache_speculation_delay: remote_cache_speculation_delay,
+        remote_cache_speculation_delay,
       })
   }
 
@@ -773,17 +771,27 @@ impl Snapshot {
   }
 
   pub fn lift_path_globs(item: &PyAny) -> Result<PathGlobs, String> {
-    let globs: Vec<String> = externs::getattr(item, "globs").unwrap();
+    let globs: Vec<String> = externs::getattr(item, "globs")
+      .map_err(|e| format!("Failed to get `globs` for field: {e}"))?;
+
     let description_of_origin = externs::getattr_as_optional_string(item, "description_of_origin")
       .map_err(|e| format!("Failed to get `description_of_origin` for field: {e}"))?;
 
-    let glob_match_error_behavior = externs::getattr(item, "glob_match_error_behavior").unwrap();
-    let failure_behavior: String = externs::getattr(glob_match_error_behavior, "value").unwrap();
+    let glob_match_error_behavior = externs::getattr(item, "glob_match_error_behavior")
+      .map_err(|e| format!("Failed to get `glob_match_error_behavior` for field: {e}"))?;
+
+    let failure_behavior: String = externs::getattr(glob_match_error_behavior, "value")
+      .map_err(|e| format!("Failed to get `value` for field: {e}"))?;
+
     let strict_glob_matching =
       StrictGlobMatching::create(failure_behavior.as_str(), description_of_origin)?;
-    let conjunction_obj = externs::getattr(item, "conjunction").unwrap();
-    let conjunction_string: String = externs::getattr(conjunction_obj, "value").unwrap();
+
+    let conjunction_obj = externs::getattr(item, "conjunction")
+      .map_err(|e| format!("Failed to get `conjunction` for field: {e}"))?;
+
+    let conjunction_string: String = externs::getattr(conjunction_obj, "value")
+      .map_err(|e| format!("Failed to get `value` for field: {e}"))?;
+
     let conjunction = GlobExpansionConjunction::create(&conjunction_string)?;
 
     Ok(PathGlobs::new(globs, strict_glob_matching, conjunction))
   }
 
@@ -1013,10 +1021,10 @@ impl DownloadedFile {
     let (url_str, expected_digest, auth_headers) = Python::with_gil(|py| {
       let py_download_file_val = self.0.to_value();
       let py_download_file = (*py_download_file_val).as_ref(py);
-      let url_str: String = externs::getattr(py_download_file, "url").unwrap();
+      let url_str: String = externs::getattr(py_download_file, "url")
+        .map_err(|e| format!("Failed to get `url` for field: {e}"))?;
       let auth_headers = externs::getattr_from_str_frozendict(py_download_file, "auth_headers");
-      let py_file_digest: PyFileDigest =
-        externs::getattr(py_download_file, "expected_digest").unwrap();
+      let py_file_digest: PyFileDigest = externs::getattr(py_download_file, "expected_digest")?;
       let res: NodeResult<(String, Digest, BTreeMap<String, String>)> =
         Ok((url_str, py_file_digest.0, auth_headers));
       res
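
// For reference, the shape of `InputDigests` after this change, reconstructed from the
// hunks above (the types of `complete` and `nailgun` are inferred from their
// `EMPTY_DIRECTORY_DIGEST` defaults and may differ slightly from the real definition):
//
//     pub struct InputDigests {
//         pub complete: DirectoryDigest,
//         pub nailgun: DirectoryDigest,
//         /// Mutable inputs, materialized into the sandbox (formerly `input_files`).
//         pub inputs: DirectoryDigest,
//         /// Immutable input digests to make available in the input root.
//         pub immutable_inputs: BTreeMap<RelativePath, DirectoryDigest>,
//         pub use_nailgun: BTreeSet<RelativePath>,
//     }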