From 025357d474ba37a8212cf4741d894eff4d3b8cd8 Mon Sep 17 00:00:00 2001 From: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> Date: Tue, 4 Jul 2023 17:24:40 -0700 Subject: [PATCH 01/32] chore: use amazon ecr credential helper in windows appveyor (#5446) --- appveyor-windows.yml | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/appveyor-windows.yml b/appveyor-windows.yml index 886e65e0c4..28567c956f 100644 --- a/appveyor-windows.yml +++ b/appveyor-windows.yml @@ -134,7 +134,14 @@ install: # Echo final Path - "echo %PATH%" - - "IF DEFINED BY_CANARY ECHO Logging in Public ECR && aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin public.ecr.aws" + # use amazon-ecr-credential-helper + - choco install amazon-ecr-credential-helper + - ps: " + $docker_config = Get-Content $env:HOME/.docker/config.json -raw | ConvertFrom-Json; + $docker_config.credsStore = 'ecr-login'; + $docker_config | ConvertTo-Json | set-content $env:HOME/.docker/config.json; + " + - ps: "get-content $env:HOME/.docker/config.json" # claim some disk space before starting the tests - "docker system prune -a -f" From b6b4e398d5410f419cfe31c8db7eeaad4955cf00 Mon Sep 17 00:00:00 2001 From: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> Date: Tue, 4 Jul 2023 22:08:35 -0700 Subject: [PATCH 02/32] chore: bump version to 1.90.0 (#5448) --- samcli/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samcli/__init__.py b/samcli/__init__.py index 48bdee91f7..1fea4bd55f 100644 --- a/samcli/__init__.py +++ b/samcli/__init__.py @@ -2,4 +2,4 @@ SAM CLI version """ -__version__ = "1.89.0" +__version__ = "1.90.0" From 3603e1247fbe655689b1239536914fd47a2ddc74 Mon Sep 17 00:00:00 2001 From: Daniel Mil <84205762+mildaniel@users.noreply.github.com> Date: Wed, 5 Jul 2023 15:46:42 -0700 Subject: [PATCH 03/32] fix: Handler path mapping for layer-wrapped esbuild functions (#5450) * fix: Layer wrapping esbuild function handlers * Remove unused import * Use nodejs18 in tests --- samcli/lib/build/bundler.py | 4 +++ tests/end_to_end/test_runtimes_e2e.py | 27 +++++++++++++++++++ .../esbuild-datadog-integration/main.js | 8 ++++++ .../esbuild-datadog-integration/template.yaml | 27 +++++++++++++++++++ tests/unit/lib/build_module/test_bundler.py | 7 +++++ 5 files changed, 73 insertions(+) create mode 100644 tests/end_to_end/testdata/esbuild-datadog-integration/main.js create mode 100644 tests/end_to_end/testdata/esbuild-datadog-integration/template.yaml diff --git a/samcli/lib/build/bundler.py b/samcli/lib/build/bundler.py index bc3774d3b9..cd69604083 100644 --- a/samcli/lib/build/bundler.py +++ b/samcli/lib/build/bundler.py @@ -12,6 +12,7 @@ LOG = logging.getLogger(__name__) +LAYER_PREFIX = "/opt" ESBUILD_PROPERTY = "esbuild" @@ -157,6 +158,9 @@ def _should_update_handler(self, handler: str, name: str) -> bool: if not handler_filename: LOG.debug("Unable to parse handler, continuing without post-processing template.") return False + if handler_filename.startswith(LAYER_PREFIX): + LOG.debug("Skipping updating the handler path as it is pointing to a layer.") + return False expected_artifact_path = Path(self._build_dir, name, handler_filename) return not expected_artifact_path.is_file() diff --git a/tests/end_to_end/test_runtimes_e2e.py b/tests/end_to_end/test_runtimes_e2e.py index ffe304b705..9955c28341 100644 --- a/tests/end_to_end/test_runtimes_e2e.py +++ b/tests/end_to_end/test_runtimes_e2e.py @@ -1,8 +1,10 @@ +from distutils.dir_util 
import copy_tree from unittest import skipIf import json from pathlib import Path +import os from parameterized import parameterized_class from tests.end_to_end.end_to_end_base import EndToEndBase @@ -163,3 +165,28 @@ def test_go_hello_world_default_workflow(self): DefaultDeleteStage(BaseValidator(e2e_context), e2e_context, delete_command_list, stack_name), ] self._run_tests(stages) + + +class TestEsbuildDatadogLayerIntegration(EndToEndBase): + app_template = "" + + def test_integration(self): + function_name = "HelloWorldFunction" + event = '{"hello": "world"}' + stack_name = self._method_to_stack_name(self.id()) + with EndToEndTestContext(self.app_name) as e2e_context: + project_path = str(Path("testdata") / "esbuild-datadog-integration") + os.mkdir(e2e_context.project_directory) + copy_tree(project_path, e2e_context.project_directory) + self.template_path = e2e_context.template_path + build_command_list = self.get_command_list() + deploy_command_list = self._get_deploy_command(stack_name) + remote_invoke_command_list = self._get_remote_invoke_command(stack_name, function_name, event, "json") + delete_command_list = self._get_delete_command(stack_name) + stages = [ + EndToEndBaseStage(BuildValidator(e2e_context), e2e_context, build_command_list), + EndToEndBaseStage(BaseValidator(e2e_context), e2e_context, deploy_command_list), + EndToEndBaseStage(RemoteInvokeValidator(e2e_context), e2e_context, remote_invoke_command_list), + DefaultDeleteStage(BaseValidator(e2e_context), e2e_context, delete_command_list, stack_name), + ] + self._run_tests(stages) diff --git a/tests/end_to_end/testdata/esbuild-datadog-integration/main.js b/tests/end_to_end/testdata/esbuild-datadog-integration/main.js new file mode 100644 index 0000000000..5ba65da324 --- /dev/null +++ b/tests/end_to_end/testdata/esbuild-datadog-integration/main.js @@ -0,0 +1,8 @@ +exports.lambdaHandler = async (event, context) => { + return { + statusCode: 200, + body: JSON.stringify({ + message: 'hello world!', + }), + }; +}; diff --git a/tests/end_to_end/testdata/esbuild-datadog-integration/template.yaml b/tests/end_to_end/testdata/esbuild-datadog-integration/template.yaml new file mode 100644 index 0000000000..3341557f3e --- /dev/null +++ b/tests/end_to_end/testdata/esbuild-datadog-integration/template.yaml @@ -0,0 +1,27 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 + +# Latest extension version: https://github.com/DataDog/datadog-lambda-extension/releases +# Latest Node.js layer version: https://github.com/DataDog/datadog-lambda-js/releases + +Parameters: + DataDogLayers: + Description: DataDog layers + Type: CommaDelimitedList + Default: "arn:aws:lambda:us-east-1:464622532012:layer:Datadog-Node18-x:93, arn:aws:lambda:us-east-1:464622532012:layer:Datadog-Extension:44" + +Resources: + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + Handler: /opt/nodejs/node_modules/datadog-lambda-js/handler.handler + Runtime: nodejs18.x + Environment: + Variables: + DD_LAMBDA_HANDLER: main.lambdaHandler + Layers: !Ref DataDogLayers + Metadata: + BuildMethod: esbuild + BuildProperties: + EntryPoints: + - main.js \ No newline at end of file diff --git a/tests/unit/lib/build_module/test_bundler.py b/tests/unit/lib/build_module/test_bundler.py index 39fb1f7c32..c25543b09e 100644 --- a/tests/unit/lib/build_module/test_bundler.py +++ b/tests/unit/lib/build_module/test_bundler.py @@ -195,6 +195,13 @@ def test_check_invalid_lambda_handler_none_build_dir(self): return_val = 
bundler_manager._should_update_handler("", "") self.assertFalse(return_val) + def test_should_not_update_layer_path(self): + bundler_manager = EsbuildBundlerManager(Mock(), build_dir="/build/dir") + bundler_manager._get_path_and_filename_from_handler = Mock() + bundler_manager._get_path_and_filename_from_handler.return_value = "/opt/nodejs/node_modules/d/handler.handler" + return_val = bundler_manager._should_update_handler("", "") + self.assertFalse(return_val) + def test_update_function_handler(self): resources = { "FunctionA": { From cee2d3d05aa63024729d0f58e31454cc721f5c73 Mon Sep 17 00:00:00 2001 From: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Date: Wed, 5 Jul 2023 18:49:39 -0700 Subject: [PATCH 04/32] fix: fix macos reproducable task and gh actions (#5455) --- .../automated-updates-to-sam-cli.yml | 4 ++-- Makefile | 2 +- requirements/base.txt | 2 +- requirements/reproducible-linux.txt | 6 +++--- requirements/reproducible-mac.txt | 19 +++++-------------- 5 files changed, 12 insertions(+), 21 deletions(-) diff --git a/.github/workflows/automated-updates-to-sam-cli.yml b/.github/workflows/automated-updates-to-sam-cli.yml index 45c49f727a..a42123ea5b 100644 --- a/.github/workflows/automated-updates-to-sam-cli.yml +++ b/.github/workflows/automated-updates-to-sam-cli.yml @@ -75,7 +75,7 @@ jobs: - uses: actions/setup-python@v4 # used for make update-reproducible-reqs below with: python-version: | - 3.7 + 3.8 3.11 - name: Update aws-sam-translator & commit @@ -132,7 +132,7 @@ jobs: - uses: actions/setup-python@v4 # used for make update-reproducible-reqs below with: python-version: | - 3.7 + 3.8 3.11 - name: Upgrade aws_lambda_builders & commit diff --git a/Makefile b/Makefile index 8876d482b2..dd80957cae 100644 --- a/Makefile +++ b/Makefile @@ -65,7 +65,7 @@ update-reproducible-linux-reqs: venv-update-reproducible-linux/bin/pip-compile --generate-hashes --allow-unsafe -o requirements/reproducible-linux.txt update-reproducible-mac-reqs: - python3.7 -m venv venv-update-reproducible-mac + python3.8 -m venv venv-update-reproducible-mac venv-update-reproducible-mac/bin/pip install --upgrade pip-tools pip venv-update-reproducible-mac/bin/pip install -r requirements/base.txt venv-update-reproducible-mac/bin/pip-compile --generate-hashes --allow-unsafe -o requirements/reproducible-mac.txt diff --git a/requirements/base.txt b/requirements/base.txt index f0626e7145..707300f4b8 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -7,7 +7,7 @@ jmespath~=1.0.1 ruamel_yaml~=0.17.32 PyYAML>=5.4.1,==5.* cookiecutter~=2.1.1 -aws-sam-translator==1.70.0 +aws-sam-translator==1.71.0 #docker minor version updates can include breaking changes. Auto update micro version only. 
docker~=6.1.0 dateparser~=1.1 diff --git a/requirements/reproducible-linux.txt b/requirements/reproducible-linux.txt index f946536226..7ed8c51903 100644 --- a/requirements/reproducible-linux.txt +++ b/requirements/reproducible-linux.txt @@ -19,9 +19,9 @@ aws-lambda-builders==1.34.0 \ --hash=sha256:0790f7e9b7ee7286b96fbcf49450c5be0341bb7cb852ca7d74beae190139eb48 \ --hash=sha256:20456a942a417407b42ecf8ab7fce6a47306fd063051e7cb09d02d1be24d5cf3 # via aws-sam-cli (setup.py) -aws-sam-translator==1.70.0 \ - --hash=sha256:753288eda07b057e5350773b7617076962b59404d49cd05e2259ac96a7694436 \ - --hash=sha256:a2df321607d29791893707ef2ded9e79be00dbb71ac430696f6e6d7d0b0301a5 +aws-sam-translator==1.71.0 \ + --hash=sha256:17fb87c8137d8d49e7a978396b2b3b279211819dee44618415aab1e99c2cb659 \ + --hash=sha256:a3ea80aeb116d7978b26ac916d2a5a24d012b742bf28262b17769c4b886e8fba # via # aws-sam-cli (setup.py) # cfn-lint diff --git a/requirements/reproducible-mac.txt b/requirements/reproducible-mac.txt index a1db7a41cf..4ac1c38a21 100644 --- a/requirements/reproducible-mac.txt +++ b/requirements/reproducible-mac.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with Python 3.7 +# This file is autogenerated by pip-compile with Python 3.8 # by the following command: # # pip-compile --allow-unsafe --generate-hashes --output-file=requirements/reproducible-mac.txt @@ -19,9 +19,9 @@ aws-lambda-builders==1.34.0 \ --hash=sha256:0790f7e9b7ee7286b96fbcf49450c5be0341bb7cb852ca7d74beae190139eb48 \ --hash=sha256:20456a942a417407b42ecf8ab7fce6a47306fd063051e7cb09d02d1be24d5cf3 # via aws-sam-cli (setup.py) -aws-sam-translator==1.70.0 \ - --hash=sha256:753288eda07b057e5350773b7617076962b59404d49cd05e2259ac96a7694436 \ - --hash=sha256:a2df321607d29791893707ef2ded9e79be00dbb71ac430696f6e6d7d0b0301a5 +aws-sam-translator==1.71.0 \ + --hash=sha256:17fb87c8137d8d49e7a978396b2b3b279211819dee44618415aab1e99c2cb659 \ + --hash=sha256:a3ea80aeb116d7978b26ac916d2a5a24d012b742bf28262b17769c4b886e8fba # via # aws-sam-cli (setup.py) # cfn-lint @@ -270,12 +270,7 @@ idna==3.4 \ importlib-metadata==6.7.0 \ --hash=sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4 \ --hash=sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5 - # via - # attrs - # click - # flask - # jsonpickle - # jsonschema + # via flask importlib-resources==5.12.0 \ --hash=sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6 \ --hash=sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a @@ -722,12 +717,8 @@ typing-extensions==4.4.0 \ --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via - # arrow # aws-sam-cli (setup.py) # aws-sam-translator - # importlib-metadata - # jsonschema - # markdown-it-py # pydantic # rich tzlocal==3.0 \ From 16a1740b635d366de19cbb54dd549cd27abdbc63 Mon Sep 17 00:00:00 2001 From: Elvis Henrique Pereira Date: Thu, 6 Jul 2023 18:12:15 -0300 Subject: [PATCH 05/32] feat(sync): support build-image option (#5441) * feat(sync): support build-image option * chore: adding build image option on help option --- samcli/commands/_utils/options.py | 23 +++++++++++++++++++ samcli/commands/build/command.py | 17 ++------------ samcli/commands/sync/command.py | 12 +++++++++- samcli/commands/sync/core/options.py | 1 + .../unit/commands/samconfig/test_samconfig.py | 1 + tests/unit/commands/sync/test_command.py | 6 +++++ 6 files changed, 44 insertions(+), 16 deletions(-) diff 
--git a/samcli/commands/_utils/options.py b/samcli/commands/_utils/options.py index 5b1b55cc32..24f5eef3a6 100644 --- a/samcli/commands/_utils/options.py +++ b/samcli/commands/_utils/options.py @@ -796,6 +796,29 @@ def use_container_build_option(f): return use_container_build_click_option()(f) +def build_image_click_option(cls): + return click.option( + "--build-image", + "-bi", + default=None, + multiple=True, # Can pass in multiple build images + required=False, + help="Container image URIs for building functions/layers. " + "You can specify for all functions/layers with just the image URI " + "(--build-image public.ecr.aws/sam/build-nodejs18.x:latest). " + "You can specify for each individual function with " + "(--build-image FunctionLogicalID=public.ecr.aws/sam/build-nodejs18.x:latest). " + "A combination of the two can be used. If a function does not have build image specified or " + "an image URI for all functions, the default SAM CLI build images will be used.", + cls=cls, + ) + + +@parameterized_option +def build_image_option(f, cls): + return build_image_click_option(cls)(f) + + def _space_separated_list_func_type(value): if isinstance(value, str): return value.split(" ") diff --git a/samcli/commands/build/command.py b/samcli/commands/build/command.py index 92a2c6578c..a60f9954e1 100644 --- a/samcli/commands/build/command.py +++ b/samcli/commands/build/command.py @@ -20,6 +20,7 @@ manifest_option, cached_option, use_container_build_option, + build_image_option, hook_name_click_option, ) from samcli.commands._utils.option_value_processor import process_env_var, process_image_options @@ -94,21 +95,7 @@ help="Environment variables json file (e.g., env_vars.json) to be passed to build containers.", cls=ContainerOptions, ) -@click.option( - "--build-image", - "-bi", - default=None, - multiple=True, # Can pass in multiple build images - required=False, - help="Container image URIs for building functions/layers. " - "You can specify for all functions/layers with just the image URI " - "(--build-image public.ecr.aws/sam/build-nodejs18.x:latest). " - "You can specify for each individual function with " - "(--build-image FunctionLogicalID=public.ecr.aws/sam/build-nodejs18.x:latest). " - "A combination of the two can be used. 
If a function does not have build image specified or " - "an image URI for all functions, the default SAM CLI build images will be used.", - cls=ContainerOptions, -) +@build_image_option(cls=ContainerOptions) @click.option( "--exclude", "-x", diff --git a/samcli/commands/sync/command.py b/samcli/commands/sync/command.py index 26eccac4b3..81f1222207 100644 --- a/samcli/commands/sync/command.py +++ b/samcli/commands/sync/command.py @@ -1,7 +1,7 @@ """CLI command for "sync" command.""" import logging import os -from typing import TYPE_CHECKING, List, Optional, Set +from typing import TYPE_CHECKING, List, Optional, Set, Tuple import click @@ -18,8 +18,10 @@ DEFAULT_CACHE_DIR, ) from samcli.commands._utils.custom_options.replace_help_option import ReplaceHelpSummaryOption +from samcli.commands._utils.option_value_processor import process_image_options from samcli.commands._utils.options import ( base_dir_option, + build_image_option, capabilities_option, image_repositories_option, image_repository_option, @@ -35,6 +37,7 @@ template_option_without_build, use_container_build_option, ) +from samcli.commands.build.click_container import ContainerOptions from samcli.commands.build.command import _get_mode_value_from_envvar from samcli.commands.sync.core.command import SyncCommand from samcli.commands.sync.sync_context import SyncContext @@ -155,6 +158,7 @@ @stack_name_option(required=True) # pylint: disable=E1120 @base_dir_option @use_container_build_option +@build_image_option(cls=ContainerOptions) @image_repository_option @image_repositories_option @s3_bucket_option(disable_callback=True) # pylint: disable=E1120 @@ -202,6 +206,7 @@ def cli( use_container: bool, config_file: str, config_env: str, + build_image: Optional[Tuple[str]], ) -> None: """ `sam sync` command entry point @@ -234,6 +239,7 @@ def cli( tags, metadata, use_container, + build_image, config_file, config_env, None, # TODO: replace with build_in_source once it's added as a click option @@ -265,6 +271,7 @@ def do_cli( tags: dict, metadata: dict, use_container: bool, + build_image: Optional[Tuple[str]], config_file: str, config_env: str, build_in_source: Optional[bool], @@ -303,6 +310,8 @@ def do_cli( LOG.debug("Using build directory as %s", build_dir) EventTracker.track_event("UsedFeature", "Accelerate") + processed_build_images = process_image_options(build_image) + with BuildContext( resource_identifier=None, template_file=template_file, @@ -320,6 +329,7 @@ def do_cli( print_success_message=False, locate_layer_nested=True, build_in_source=build_in_source, + build_images=processed_build_images, ) as build_context: built_template = os.path.join(build_dir, DEFAULT_TEMPLATE_NAME) diff --git a/samcli/commands/sync/core/options.py b/samcli/commands/sync/core/options.py index 43a63e92e7..ee7ead9799 100644 --- a/samcli/commands/sync/core/options.py +++ b/samcli/commands/sync/core/options.py @@ -25,6 +25,7 @@ "notification_arns", "tags", "metadata", + "build_image", ] CONFIGURATION_OPTION_NAMES: List[str] = ["config_env", "config_file"] diff --git a/tests/unit/commands/samconfig/test_samconfig.py b/tests/unit/commands/samconfig/test_samconfig.py index 19b4a7672d..675f22a4bf 100644 --- a/tests/unit/commands/samconfig/test_samconfig.py +++ b/tests/unit/commands/samconfig/test_samconfig.py @@ -1007,6 +1007,7 @@ def test_sync( {"a": "tag1", "b": "tag with spaces"}, {"m1": "value1", "m2": "value2"}, True, + (), "samconfig.toml", "default", None, diff --git a/tests/unit/commands/sync/test_command.py b/tests/unit/commands/sync/test_command.py 
index b0bcede4d0..6599f04076 100644 --- a/tests/unit/commands/sync/test_command.py +++ b/tests/unit/commands/sync/test_command.py @@ -57,6 +57,7 @@ def setUp(self): self.clean = True self.config_env = "mock-default-env" self.config_file = "mock-default-filename" + self.build_image = None MOCK_SAM_CONFIG.reset_mock() @parameterized.expand( @@ -141,6 +142,7 @@ def test_infra_must_succeed_sync( self.tags, self.metadata, use_container, + self.build_image, self.config_file, self.config_env, build_in_source=False, @@ -167,6 +169,7 @@ def test_infra_must_succeed_sync( print_success_message=False, locate_layer_nested=True, build_in_source=False, + build_images={}, ) PackageContextMock.assert_called_with( @@ -298,6 +301,7 @@ def test_watch_must_succeed_sync( self.tags, self.metadata, use_container, + self.build_image, self.config_file, self.config_env, build_in_source=False, @@ -320,6 +324,7 @@ def test_watch_must_succeed_sync( print_success_message=False, locate_layer_nested=True, build_in_source=False, + build_images={}, ) PackageContextMock.assert_called_with( @@ -443,6 +448,7 @@ def test_code_must_succeed_sync( self.tags, self.metadata, use_container, + self.build_image, self.config_file, self.config_env, build_in_source=None, From 58faff0dfc597016f945ca215925ca26b950c770 Mon Sep 17 00:00:00 2001 From: Daniel Mil <84205762+mildaniel@users.noreply.github.com> Date: Thu, 6 Jul 2023 18:33:07 -0700 Subject: [PATCH 06/32] fix: Avoid Certain Depedendency Version (#5460) * Avoid broken click version * Pin boto3 and jsonschema * Update reproducible reqs * Ignore deprecation warnings in pytest * Pin jsonschema --- pytest.ini | 4 ++++ requirements/base.txt | 5 ++++- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/pytest.ini b/pytest.ini index 9d19677545..80263254ba 100644 --- a/pytest.ini +++ b/pytest.ini @@ -6,3 +6,7 @@ filterwarnings = error ignore::DeprecationWarning:docker default::ResourceWarning +; The following deprecation warnings are treated as failures unless we explicitly tell pytest not to +; Remove once we no longer support python3.7 + ignore::boto3.exceptions.PythonDeprecationWarning + diff --git a/requirements/base.txt b/requirements/base.txt index 707300f4b8..38532b4714 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -1,5 +1,6 @@ chevron~=0.12 -click~=8.0 +# 8.1.4 of Click has an issue with the typing breaking the linter - https://github.com/pallets/click/issues/2558 +click~=8.0,!=8.1.4 Flask<2.3 #Need to add latest lambda changes which will return invoke mode details boto3>=1.26.109,==1.* @@ -18,6 +19,8 @@ tomlkit==0.11.8 watchdog==2.1.2 rich~=13.3.3 pyopenssl~=23.2.0 +# Pin to <4.18 to until SAM-T no longer uses RefResolver +jsonschema<4.18 # Needed for supporting Protocol in Python 3.7, Protocol class became public with python3.8 typing_extensions~=4.4.0 From cca9f930e54d33ab83475f46f0802c731dcd8609 Mon Sep 17 00:00:00 2001 From: Daniel Mil <84205762+mildaniel@users.noreply.github.com> Date: Fri, 7 Jul 2023 09:59:58 -0700 Subject: [PATCH 07/32] Fix broken e2e test (#5459) --- tests/end_to_end/end_to_end_base.py | 4 ++++ tests/end_to_end/test_runtimes_e2e.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/end_to_end/end_to_end_base.py b/tests/end_to_end/end_to_end_base.py index 926a325e86..97ad1cf3c1 100644 --- a/tests/end_to_end/end_to_end_base.py +++ b/tests/end_to_end/end_to_end_base.py @@ -1,3 +1,5 @@ +from pathlib import Path + import os from typing import List @@ -25,9 +27,11 @@ class EndToEndBase(InitIntegBase, 
StackOutputsIntegBase, DeleteIntegBase, SyncIn def setUp(self): super().setUp() + e2e_dir = Path(__file__).resolve().parent self.stacks = [] self.config_file_dir = GlobalConfig().config_dir self._create_config_dir() + self.e2e_test_data_path = Path(e2e_dir, "testdata") def _create_config_dir(self): # Init tests will lock the config dir, ensure it exists before obtaining a lock diff --git a/tests/end_to_end/test_runtimes_e2e.py b/tests/end_to_end/test_runtimes_e2e.py index 9955c28341..b5d7346161 100644 --- a/tests/end_to_end/test_runtimes_e2e.py +++ b/tests/end_to_end/test_runtimes_e2e.py @@ -175,7 +175,7 @@ def test_integration(self): event = '{"hello": "world"}' stack_name = self._method_to_stack_name(self.id()) with EndToEndTestContext(self.app_name) as e2e_context: - project_path = str(Path("testdata") / "esbuild-datadog-integration") + project_path = str(self.e2e_test_data_path / "esbuild-datadog-integration") os.mkdir(e2e_context.project_directory) copy_tree(project_path, e2e_context.project_directory) self.template_path = e2e_context.template_path From 0bd4c58c6f432266f2ed17705bc6a57cf1c736a0 Mon Sep 17 00:00:00 2001 From: Wing Fung Lau <4760060+hawflau@users.noreply.github.com> Date: Fri, 7 Jul 2023 11:35:32 -0700 Subject: [PATCH 08/32] add pip check after all pip install in pyinstaller scripts (#5463) * add pip check after all pip install in pyinstaller scripts * update pyinstaller build scripts to explicitly print executed commands --- installer/pyinstaller/build-linux.sh | 3 ++- installer/pyinstaller/build-mac.sh | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/installer/pyinstaller/build-linux.sh b/installer/pyinstaller/build-linux.sh index 041d4d190d..2b728866da 100755 --- a/installer/pyinstaller/build-linux.sh +++ b/installer/pyinstaller/build-linux.sh @@ -26,7 +26,7 @@ else is_nightly="false" fi -set -eu +set -eux yum install -y zlib-devel libffi-devel bzip2-devel @@ -83,6 +83,7 @@ cp -r ./venv/lib/python*/site-packages/* ./output/python-libraries echo "Installing PyInstaller" ./venv/bin/pip install -r src/requirements/pyinstaller-build.txt +./venv/bin/pip check echo "Building Binary" cd src diff --git a/installer/pyinstaller/build-mac.sh b/installer/pyinstaller/build-mac.sh index eafcb14f31..7a69a6b0a4 100644 --- a/installer/pyinstaller/build-mac.sh +++ b/installer/pyinstaller/build-mac.sh @@ -45,7 +45,7 @@ else is_nightly="false" fi -set -eu +set -eux echo "Making Folders" mkdir -p .build/src @@ -93,6 +93,7 @@ cp -r ./venv/lib/python*/site-packages/* ./output/python-libraries echo "Installing PyInstaller" ./venv/bin/pip install -r src/requirements/pyinstaller-build.txt +./venv/bin/pip check # Building the binary using pyinstaller echo "Building Binary" From f07e80d198de0c1b7758589999191963ad544731 Mon Sep 17 00:00:00 2001 From: Efe Karakus Date: Fri, 7 Jul 2023 14:44:29 -0700 Subject: [PATCH 09/32] chore: fix typo in help text of `pipeline bootstrap` (#5467) --- samcli/commands/pipeline/bootstrap/cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samcli/commands/pipeline/bootstrap/cli.py b/samcli/commands/pipeline/bootstrap/cli.py index d357adf5dd..9b7db02578 100644 --- a/samcli/commands/pipeline/bootstrap/cli.py +++ b/samcli/commands/pipeline/bootstrap/cli.py @@ -28,7 +28,7 @@ HELP_TEXT = """ This command generates the required AWS infrastructure resources to connect to your CI/CD system. -This step must be run for each deployment stage in your pipeline, prior to running the sam pipline init command. 
+This step must be run for each deployment stage in your pipeline, prior to running the sam pipeline init command. """ PIPELINE_CONFIG_DIR = os.path.join(".aws-sam", "pipeline") From 055b727b2aaa0a577d7ede9e355c9a667e5a198e Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 7 Jul 2023 23:01:54 +0000 Subject: [PATCH 10/32] chore: update aws-sam-translator to 1.71.0 (#5462) Co-authored-by: GitHub Action --- requirements/reproducible-linux.txt | 1 + requirements/reproducible-mac.txt | 1 + 2 files changed, 2 insertions(+) diff --git a/requirements/reproducible-linux.txt b/requirements/reproducible-linux.txt index 7ed8c51903..6f14c44c60 100644 --- a/requirements/reproducible-linux.txt +++ b/requirements/reproducible-linux.txt @@ -291,6 +291,7 @@ jsonschema==4.17.3 \ --hash=sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d \ --hash=sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6 # via + # aws-sam-cli (setup.py) # aws-sam-translator # cfn-lint junit-xml==1.9 \ diff --git a/requirements/reproducible-mac.txt b/requirements/reproducible-mac.txt index 4ac1c38a21..a922563c91 100644 --- a/requirements/reproducible-mac.txt +++ b/requirements/reproducible-mac.txt @@ -317,6 +317,7 @@ jsonschema==4.17.3 \ --hash=sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d \ --hash=sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6 # via + # aws-sam-cli (setup.py) # aws-sam-translator # cfn-lint junit-xml==1.9 \ From c78c617ad7faa08fc13e99d31d5afe196db49309 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Jul 2023 11:43:39 -0700 Subject: [PATCH 11/32] chore(deps): bump cryptography from 41.0.0 to 41.0.1 in /requirements (#5473) Bumps [cryptography](https://github.com/pyca/cryptography) from 41.0.0 to 41.0.1. - [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/41.0.0...41.0.1) --- updated-dependencies: - dependency-name: cryptography dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/reproducible-linux.txt | 40 ++++++++++++++--------------- requirements/reproducible-mac.txt | 40 ++++++++++++++--------------- 2 files changed, 40 insertions(+), 40 deletions(-) diff --git a/requirements/reproducible-linux.txt b/requirements/reproducible-linux.txt index 6f14c44c60..a5fd1d3868 100644 --- a/requirements/reproducible-linux.txt +++ b/requirements/reproducible-linux.txt @@ -212,26 +212,26 @@ cookiecutter==2.1.1 \ --hash=sha256:9f3ab027cec4f70916e28f03470bdb41e637a3ad354b4d65c765d93aad160022 \ --hash=sha256:f3982be8d9c53dac1261864013fdec7f83afd2e42ede6f6dd069c5e149c540d5 # via aws-sam-cli (setup.py) -cryptography==41.0.0 \ - --hash=sha256:0ddaee209d1cf1f180f1efa338a68c4621154de0afaef92b89486f5f96047c55 \ - --hash=sha256:14754bcdae909d66ff24b7b5f166d69340ccc6cb15731670435efd5719294895 \ - --hash=sha256:344c6de9f8bda3c425b3a41b319522ba3208551b70c2ae00099c205f0d9fd3be \ - --hash=sha256:34d405ea69a8b34566ba3dfb0521379b210ea5d560fafedf9f800a9a94a41928 \ - --hash=sha256:3680248309d340fda9611498a5319b0193a8dbdb73586a1acf8109d06f25b92d \ - --hash=sha256:3c5ef25d060c80d6d9f7f9892e1d41bb1c79b78ce74805b8cb4aa373cb7d5ec8 \ - --hash=sha256:4ab14d567f7bbe7f1cdff1c53d5324ed4d3fc8bd17c481b395db224fb405c237 \ - --hash=sha256:5c1f7293c31ebc72163a9a0df246f890d65f66b4a40d9ec80081969ba8c78cc9 \ - --hash=sha256:6b71f64beeea341c9b4f963b48ee3b62d62d57ba93eb120e1196b31dc1025e78 \ - --hash=sha256:7d92f0248d38faa411d17f4107fc0bce0c42cae0b0ba5415505df72d751bf62d \ - --hash=sha256:8362565b3835ceacf4dc8f3b56471a2289cf51ac80946f9087e66dc283a810e0 \ - --hash=sha256:84a165379cb9d411d58ed739e4af3396e544eac190805a54ba2e0322feb55c46 \ - --hash=sha256:88ff107f211ea696455ea8d911389f6d2b276aabf3231bf72c8853d22db755c5 \ - --hash=sha256:9f65e842cb02550fac96536edb1d17f24c0a338fd84eaf582be25926e993dde4 \ - --hash=sha256:a4fc68d1c5b951cfb72dfd54702afdbbf0fb7acdc9b7dc4301bbf2225a27714d \ - --hash=sha256:b7f2f5c525a642cecad24ee8670443ba27ac1fab81bba4cc24c7b6b41f2d0c75 \ - --hash=sha256:b846d59a8d5a9ba87e2c3d757ca019fa576793e8758174d3868aecb88d6fc8eb \ - --hash=sha256:bf8fc66012ca857d62f6a347007e166ed59c0bc150cefa49f28376ebe7d992a2 \ - --hash=sha256:f5d0bf9b252f30a31664b6f64432b4730bb7038339bd18b1fafe129cfc2be9be +cryptography==41.0.1 \ + --hash=sha256:059e348f9a3c1950937e1b5d7ba1f8e968508ab181e75fc32b879452f08356db \ + --hash=sha256:1a5472d40c8f8e91ff7a3d8ac6dfa363d8e3138b961529c996f3e2df0c7a411a \ + --hash=sha256:1a8e6c2de6fbbcc5e14fd27fb24414507cb3333198ea9ab1258d916f00bc3039 \ + --hash=sha256:1fee5aacc7367487b4e22484d3c7e547992ed726d14864ee33c0176ae43b0d7c \ + --hash=sha256:5d092fdfedaec4cbbffbf98cddc915ba145313a6fdaab83c6e67f4e6c218e6f3 \ + --hash=sha256:5f0ff6e18d13a3de56f609dd1fd11470918f770c6bd5d00d632076c727d35485 \ + --hash=sha256:7bfc55a5eae8b86a287747053140ba221afc65eb06207bedf6e019b8934b477c \ + --hash=sha256:7fa01527046ca5facdf973eef2535a27fec4cb651e4daec4d043ef63f6ecd4ca \ + --hash=sha256:8dde71c4169ec5ccc1087bb7521d54251c016f126f922ab2dfe6649170a3b8c5 \ + --hash=sha256:8f4ab7021127a9b4323537300a2acfb450124b2def3756f64dc3a3d2160ee4b5 \ + --hash=sha256:948224d76c4b6457349d47c0c98657557f429b4e93057cf5a2f71d603e2fc3a3 \ + --hash=sha256:9a6c7a3c87d595608a39980ebaa04d5a37f94024c9f24eb7d10262b92f739ddb \ + --hash=sha256:b46e37db3cc267b4dea1f56da7346c9727e1209aa98487179ee8ebed09d21e43 \ + --hash=sha256:b4ceb5324b998ce2003bc17d519080b4ec8d5b7b70794cbd2836101406a9be31 \ + 
--hash=sha256:cb33ccf15e89f7ed89b235cff9d49e2e62c6c981a6061c9c8bb47ed7951190bc \ + --hash=sha256:d198820aba55660b4d74f7b5fd1f17db3aa5eb3e6893b0a41b75e84e4f9e0e4b \ + --hash=sha256:d34579085401d3f49762d2f7d6634d6b6c2ae1242202e860f4d26b046e3a1006 \ + --hash=sha256:eb8163f5e549a22888c18b0d53d6bb62a20510060a22fd5a995ec8a05268df8a \ + --hash=sha256:f73bff05db2a3e5974a6fd248af2566134d8981fd7ab012e5dd4ddb1d9a70699 # via pyopenssl dateparser==1.1.8 \ --hash=sha256:070b29b5bbf4b1ec2cd51c96ea040dc68a614de703910a91ad1abba18f9f379f \ diff --git a/requirements/reproducible-mac.txt b/requirements/reproducible-mac.txt index a922563c91..0fb4cfca06 100644 --- a/requirements/reproducible-mac.txt +++ b/requirements/reproducible-mac.txt @@ -230,26 +230,26 @@ cookiecutter==2.1.1 \ --hash=sha256:9f3ab027cec4f70916e28f03470bdb41e637a3ad354b4d65c765d93aad160022 \ --hash=sha256:f3982be8d9c53dac1261864013fdec7f83afd2e42ede6f6dd069c5e149c540d5 # via aws-sam-cli (setup.py) -cryptography==41.0.0 \ - --hash=sha256:0ddaee209d1cf1f180f1efa338a68c4621154de0afaef92b89486f5f96047c55 \ - --hash=sha256:14754bcdae909d66ff24b7b5f166d69340ccc6cb15731670435efd5719294895 \ - --hash=sha256:344c6de9f8bda3c425b3a41b319522ba3208551b70c2ae00099c205f0d9fd3be \ - --hash=sha256:34d405ea69a8b34566ba3dfb0521379b210ea5d560fafedf9f800a9a94a41928 \ - --hash=sha256:3680248309d340fda9611498a5319b0193a8dbdb73586a1acf8109d06f25b92d \ - --hash=sha256:3c5ef25d060c80d6d9f7f9892e1d41bb1c79b78ce74805b8cb4aa373cb7d5ec8 \ - --hash=sha256:4ab14d567f7bbe7f1cdff1c53d5324ed4d3fc8bd17c481b395db224fb405c237 \ - --hash=sha256:5c1f7293c31ebc72163a9a0df246f890d65f66b4a40d9ec80081969ba8c78cc9 \ - --hash=sha256:6b71f64beeea341c9b4f963b48ee3b62d62d57ba93eb120e1196b31dc1025e78 \ - --hash=sha256:7d92f0248d38faa411d17f4107fc0bce0c42cae0b0ba5415505df72d751bf62d \ - --hash=sha256:8362565b3835ceacf4dc8f3b56471a2289cf51ac80946f9087e66dc283a810e0 \ - --hash=sha256:84a165379cb9d411d58ed739e4af3396e544eac190805a54ba2e0322feb55c46 \ - --hash=sha256:88ff107f211ea696455ea8d911389f6d2b276aabf3231bf72c8853d22db755c5 \ - --hash=sha256:9f65e842cb02550fac96536edb1d17f24c0a338fd84eaf582be25926e993dde4 \ - --hash=sha256:a4fc68d1c5b951cfb72dfd54702afdbbf0fb7acdc9b7dc4301bbf2225a27714d \ - --hash=sha256:b7f2f5c525a642cecad24ee8670443ba27ac1fab81bba4cc24c7b6b41f2d0c75 \ - --hash=sha256:b846d59a8d5a9ba87e2c3d757ca019fa576793e8758174d3868aecb88d6fc8eb \ - --hash=sha256:bf8fc66012ca857d62f6a347007e166ed59c0bc150cefa49f28376ebe7d992a2 \ - --hash=sha256:f5d0bf9b252f30a31664b6f64432b4730bb7038339bd18b1fafe129cfc2be9be +cryptography==41.0.1 \ + --hash=sha256:059e348f9a3c1950937e1b5d7ba1f8e968508ab181e75fc32b879452f08356db \ + --hash=sha256:1a5472d40c8f8e91ff7a3d8ac6dfa363d8e3138b961529c996f3e2df0c7a411a \ + --hash=sha256:1a8e6c2de6fbbcc5e14fd27fb24414507cb3333198ea9ab1258d916f00bc3039 \ + --hash=sha256:1fee5aacc7367487b4e22484d3c7e547992ed726d14864ee33c0176ae43b0d7c \ + --hash=sha256:5d092fdfedaec4cbbffbf98cddc915ba145313a6fdaab83c6e67f4e6c218e6f3 \ + --hash=sha256:5f0ff6e18d13a3de56f609dd1fd11470918f770c6bd5d00d632076c727d35485 \ + --hash=sha256:7bfc55a5eae8b86a287747053140ba221afc65eb06207bedf6e019b8934b477c \ + --hash=sha256:7fa01527046ca5facdf973eef2535a27fec4cb651e4daec4d043ef63f6ecd4ca \ + --hash=sha256:8dde71c4169ec5ccc1087bb7521d54251c016f126f922ab2dfe6649170a3b8c5 \ + --hash=sha256:8f4ab7021127a9b4323537300a2acfb450124b2def3756f64dc3a3d2160ee4b5 \ + --hash=sha256:948224d76c4b6457349d47c0c98657557f429b4e93057cf5a2f71d603e2fc3a3 \ + 
--hash=sha256:9a6c7a3c87d595608a39980ebaa04d5a37f94024c9f24eb7d10262b92f739ddb \ + --hash=sha256:b46e37db3cc267b4dea1f56da7346c9727e1209aa98487179ee8ebed09d21e43 \ + --hash=sha256:b4ceb5324b998ce2003bc17d519080b4ec8d5b7b70794cbd2836101406a9be31 \ + --hash=sha256:cb33ccf15e89f7ed89b235cff9d49e2e62c6c981a6061c9c8bb47ed7951190bc \ + --hash=sha256:d198820aba55660b4d74f7b5fd1f17db3aa5eb3e6893b0a41b75e84e4f9e0e4b \ + --hash=sha256:d34579085401d3f49762d2f7d6634d6b6c2ae1242202e860f4d26b046e3a1006 \ + --hash=sha256:eb8163f5e549a22888c18b0d53d6bb62a20510060a22fd5a995ec8a05268df8a \ + --hash=sha256:f73bff05db2a3e5974a6fd248af2566134d8981fd7ab012e5dd4ddb1d9a70699 # via pyopenssl dateparser==1.1.8 \ --hash=sha256:070b29b5bbf4b1ec2cd51c96ea040dc68a614de703910a91ad1abba18f9f379f \ From cb5e46bf1972c498a80d4e47bb4a5cb5263ea8ed Mon Sep 17 00:00:00 2001 From: Daniel Mil <84205762+mildaniel@users.noreply.github.com> Date: Mon, 10 Jul 2023 15:25:48 -0700 Subject: [PATCH 12/32] fix: E2E test can't find esbuild binary (#5476) --- .../esbuild-datadog-integration/package.json | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 tests/end_to_end/testdata/esbuild-datadog-integration/package.json diff --git a/tests/end_to_end/testdata/esbuild-datadog-integration/package.json b/tests/end_to_end/testdata/esbuild-datadog-integration/package.json new file mode 100644 index 0000000000..8a10aa75a1 --- /dev/null +++ b/tests/end_to_end/testdata/esbuild-datadog-integration/package.json @@ -0,0 +1,12 @@ +{ + "name": "npmdeps", + "version": "1.0.0", + "description": "", + "keywords": [], + "author": "", + "license": "APACHE2.0", + "main": "main.js", + "dependencies": { + "esbuild": "^0.14.14" + } +} \ No newline at end of file From d5ce6d5c25054e7a3ca06c6c2e89f113b7c61842 Mon Sep 17 00:00:00 2001 From: Jacob Fuss <32497805+jfuss@users.noreply.github.com> Date: Mon, 10 Jul 2023 17:43:17 -0500 Subject: [PATCH 13/32] fix(invoke): Write in UTF-8 string instead of bytes (#5427) * Revert "Revert "fix(invoke): Write in UTF-8 string instead of bytes. (#5232)" (#5401)" This reverts commit 7b7c54c59ad15ad90fd558d640daefa5142115d7. 
* Add typing and fix issues found in the reverted commit * Update of comments * handle pr feedback --------- Co-authored-by: Jacob Fuss --- .../commands/remote/remote_invoke_context.py | 4 +- samcli/lib/docker/log_streamer.py | 28 +++++------ samcli/lib/package/s3_uploader.py | 2 +- samcli/lib/utils/osutils.py | 13 +++-- samcli/lib/utils/stream_writer.py | 21 +++++++-- samcli/lib/utils/subprocess_utils.py | 4 +- samcli/local/docker/container.py | 47 +++++++++++++++---- samcli/local/docker/lambda_image.py | 11 +++-- samcli/local/docker/manager.py | 6 +-- .../local/invoke/test_integrations_cli.py | 21 +++++++++ tests/unit/lib/utils/test_osutils.py | 8 ---- tests/unit/lib/utils/test_stream_writer.py | 11 +++-- tests/unit/lib/utils/test_subprocess_utils.py | 13 ++--- tests/unit/local/docker/test_container.py | 29 ++++++------ tests/unit/local/docker/test_lambda_image.py | 9 ++-- tests/unit/local/docker/test_manager.py | 24 +++++++--- 16 files changed, 162 insertions(+), 89 deletions(-) diff --git a/samcli/commands/remote/remote_invoke_context.py b/samcli/commands/remote/remote_invoke_context.py index 90242b5142..93663d92fa 100644 --- a/samcli/commands/remote/remote_invoke_context.py +++ b/samcli/commands/remote/remote_invoke_context.py @@ -242,7 +242,7 @@ class DefaultRemoteInvokeResponseConsumer(RemoteInvokeConsumer[RemoteInvokeRespo _stream_writer: StreamWriter def consume(self, remote_invoke_response: RemoteInvokeResponse) -> None: - self._stream_writer.write(cast(str, remote_invoke_response.response).encode()) + self._stream_writer.write_bytes(cast(str, remote_invoke_response.response).encode()) @dataclass @@ -254,4 +254,4 @@ class DefaultRemoteInvokeLogConsumer(RemoteInvokeConsumer[RemoteInvokeLogOutput] _stream_writer: StreamWriter def consume(self, remote_invoke_response: RemoteInvokeLogOutput) -> None: - self._stream_writer.write(remote_invoke_response.log_output.encode()) + self._stream_writer.write_bytes(remote_invoke_response.log_output.encode()) diff --git a/samcli/lib/docker/log_streamer.py b/samcli/lib/docker/log_streamer.py index b013459bae..3bb437781a 100644 --- a/samcli/lib/docker/log_streamer.py +++ b/samcli/lib/docker/log_streamer.py @@ -47,23 +47,21 @@ def stream_progress(self, logs: docker.APIClient.logs): else: curr_log_line_id = ids[_id] change_cursor_count = len(ids) - curr_log_line_id - self._stream.write( + self._stream.write_str( self._cursor_up_formatter.cursor_format(change_cursor_count) - + self._cursor_left_formatter.cursor_format(), - encode=True, + + self._cursor_left_formatter.cursor_format() ) self._stream_write(_id, status, stream, progress, error) if _id: - self._stream.write( + self._stream.write_str( self._cursor_down_formatter.cursor_format(change_cursor_count) - + self._cursor_left_formatter.cursor_format(), - encode=True, + + self._cursor_left_formatter.cursor_format() ) - self._stream.write(os.linesep, encode=True) + self._stream.write_str(os.linesep) - def _stream_write(self, _id: str, status: str, stream: bytes, progress: str, error: str): + def _stream_write(self, _id: str, status: str, stream: str, progress: str, error: str): """ Write stream information to stderr, if the stream information contains a log id, use the carriage return character to rewrite that particular line. @@ -80,14 +78,14 @@ def _stream_write(self, _id: str, status: str, stream: bytes, progress: str, err # NOTE(sriram-mv): Required for the purposes of when the cursor overflows existing terminal buffer. 
if not stream: - self._stream.write(os.linesep, encode=True) - self._stream.write( - self._cursor_up_formatter.cursor_format() + self._cursor_left_formatter.cursor_format(), encode=True + self._stream.write_str(os.linesep) + self._stream.write_str( + self._cursor_up_formatter.cursor_format() + self._cursor_left_formatter.cursor_format() ) - self._stream.write(self._cursor_clear_formatter.cursor_format(), encode=True) + self._stream.write_str(self._cursor_clear_formatter.cursor_format()) if not _id: - self._stream.write(stream, encode=True) - self._stream.write(status, encode=True) + self._stream.write_str(stream) + self._stream.write_str(status) else: - self._stream.write(f"\r{_id}: {status} {progress}", encode=True) + self._stream.write_str(f"\r{_id}: {status} {progress}") diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index fe141ada51..95981e92ed 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -265,4 +265,4 @@ def on_progress(self, bytes_transferred, **kwargs): ) sys.stderr.flush() if int(percentage) == 100: # noqa: PLR2004 - sys.stderr.write("\n") + sys.stderr.write(os.linesep) diff --git a/samcli/lib/utils/osutils.py b/samcli/lib/utils/osutils.py index d53dc9ffb5..f722a8deac 100644 --- a/samcli/lib/utils/osutils.py +++ b/samcli/lib/utils/osutils.py @@ -1,6 +1,7 @@ """ Common OS utilities """ +import io import logging import os import shutil @@ -78,7 +79,7 @@ def rmtree_if_exists(path: Union[str, Path]): shutil.rmtree(path_obj) -def stdout(): +def stdout() -> io.TextIOWrapper: """ Returns the stdout as a byte stream in a Py2/PY3 compatible manner @@ -87,10 +88,12 @@ def stdout(): io.BytesIO Byte stream of Stdout """ - return sys.stdout.buffer + # Note(jfuss): sys.stdout is a type typing.TextIO but are initialized to + # io.TextIOWrapper. To make mypy and typing play well, tell mypy to ignore. + return sys.stdout # type:ignore[return-value] -def stderr(): +def stderr() -> io.TextIOWrapper: """ Returns the stderr as a byte stream in a Py2/PY3 compatible manner @@ -99,7 +102,9 @@ def stderr(): io.BytesIO Byte stream of stderr """ - return sys.stderr.buffer + # Note(jfuss): sys.stderr is a type typing.TextIO but are initialized to + # io.TextIOWrapper. To make mypy and typing play well, tell mypy to ignore. 
+ return sys.stderr # type:ignore[return-value] def remove(path): diff --git a/samcli/lib/utils/stream_writer.py b/samcli/lib/utils/stream_writer.py index 1fc62fa690..e5d0b43c1e 100644 --- a/samcli/lib/utils/stream_writer.py +++ b/samcli/lib/utils/stream_writer.py @@ -1,10 +1,11 @@ """ This class acts like a wrapper around output streams to provide any flexibility with output we need """ +from typing import Union class StreamWriter: - def __init__(self, stream, auto_flush=False): + def __init__(self, stream, auto_flush: bool = False): """ Instatiates new StreamWriter to the specified stream @@ -22,7 +23,7 @@ def __init__(self, stream, auto_flush=False): def stream(self): return self._stream - def write(self, output, encode=False): + def write_bytes(self, output: Union[bytes, bytearray]): """ Writes specified text to the underlying stream @@ -31,7 +32,21 @@ def write(self, output, encode=False): output bytes-like object Bytes to write """ - self._stream.write(output.encode() if encode else output) + self._stream.buffer.write(output) + + if self._auto_flush: + self._stream.flush() + + def write_str(self, output: str): + """ + Writes specified text to the underlying stream + + Parameters + ---------- + output bytes-like object + Bytes to write + """ + self._stream.write(output) if self._auto_flush: self._stream.flush() diff --git a/samcli/lib/utils/subprocess_utils.py b/samcli/lib/utils/subprocess_utils.py index e08ec12e49..1937a44eeb 100644 --- a/samcli/lib/utils/subprocess_utils.py +++ b/samcli/lib/utils/subprocess_utils.py @@ -34,7 +34,7 @@ def default_loading_pattern(stream_writer: Optional[StreamWriter] = None, loadin How frequently to generate the pattern """ stream_writer = stream_writer or StreamWriter(sys.stderr) - stream_writer.write(".") + stream_writer.write_str(".") stream_writer.flush() sleep(loading_pattern_rate) @@ -96,7 +96,7 @@ def _print_loading_pattern(): return_code = process.wait() keep_printing = False - stream_writer.write(os.linesep) + stream_writer.write_str(os.linesep) stream_writer.flush() process_stderr = _check_and_convert_stream_to_string(process.stderr) diff --git a/samcli/local/docker/container.py b/samcli/local/docker/container.py index e70f7c2a1f..7082d521f1 100644 --- a/samcli/local/docker/container.py +++ b/samcli/local/docker/container.py @@ -1,6 +1,8 @@ """ Representation of a generic Docker container """ +import io +import json import logging import os import pathlib @@ -9,7 +11,7 @@ import tempfile import threading import time -from typing import Optional +from typing import Iterator, Optional, Tuple, Union import docker import requests @@ -17,6 +19,7 @@ from samcli.lib.constants import DOCKER_MIN_API_VERSION from samcli.lib.utils.retry import retry +from samcli.lib.utils.stream_writer import StreamWriter from samcli.lib.utils.tar import extract_tarfile from samcli.local.docker.effective_user import ROOT_USER_ID, EffectiveUser @@ -314,7 +317,7 @@ def start(self, input_data=None): real_container.start() @retry(exc=requests.exceptions.RequestException, exc_raise=ContainerResponseException) - def wait_for_http_response(self, name, event, stdout): + def wait_for_http_response(self, name, event, stdout) -> str: # TODO(sriram-mv): `aws-lambda-rie` is in a mode where the function_name is always "function" # NOTE(sriram-mv): There is a connection timeout set on the http call to `aws-lambda-rie`, however there is not # a read time out for the response received from the server. 
@@ -324,7 +327,7 @@ def wait_for_http_response(self, name, event, stdout): data=event.encode("utf-8"), timeout=(self.RAPID_CONNECTION_TIMEOUT, None), ) - stdout.write(resp.content) + return json.dumps(json.loads(resp.content), ensure_ascii=False) def wait_for_result(self, full_path, event, stdout, stderr, start_timer=None): # NOTE(sriram-mv): Let logging happen in its own thread, so that a http request can be sent. @@ -344,11 +347,21 @@ def wait_for_result(self, full_path, event, stdout, stderr, start_timer=None): # start the timer for function timeout right before executing the function, as waiting for the socket # can take some time timer = start_timer() if start_timer else None - self.wait_for_http_response(full_path, event, stdout) + response = self.wait_for_http_response(full_path, event, stdout) if timer: timer.cancel() - def wait_for_logs(self, stdout=None, stderr=None): + # NOTE(jfuss): Adding a sleep after we get a response from the contianer but before we + # we write the response to ensure the last thing written to stdout is the container response + time.sleep(1) + stdout.write_str(response) + stdout.flush() + + def wait_for_logs( + self, + stdout: Optional[Union[StreamWriter, io.BytesIO, io.TextIOWrapper]] = None, + stderr: Optional[Union[StreamWriter, io.BytesIO, io.TextIOWrapper]] = None, + ): # Return instantly if we don't have to fetch any logs if not stdout and not stderr: return @@ -360,7 +373,6 @@ def wait_for_logs(self, stdout=None, stderr=None): # Fetch both stdout and stderr streams from Docker as a single iterator. logs_itr = real_container.attach(stream=True, logs=True, demux=True) - self._write_container_output(logs_itr, stdout=stdout, stderr=stderr) def _wait_for_socket_connection(self) -> None: @@ -411,7 +423,11 @@ def copy(self, from_container_path, to_host_path) -> None: extract_tarfile(file_obj=fp, unpack_dir=to_host_path) @staticmethod - def _write_container_output(output_itr, stdout=None, stderr=None): + def _write_container_output( + output_itr: Iterator[Tuple[bytes, bytes]], + stdout: Optional[Union[StreamWriter, io.BytesIO, io.TextIOWrapper]] = None, + stderr: Optional[Union[StreamWriter, io.BytesIO, io.TextIOWrapper]] = None, + ): """ Based on the data returned from the Container output, via the iterator, write it to the appropriate streams @@ -430,13 +446,26 @@ def _write_container_output(output_itr, stdout=None, stderr=None): # Iterator returns a tuple of (stdout, stderr) for stdout_data, stderr_data in output_itr: if stdout_data and stdout: - stdout.write(stdout_data) + Container._handle_data_writing(stdout, stdout_data) if stderr_data and stderr: - stderr.write(stderr_data) + Container._handle_data_writing(stderr, stderr_data) + except Exception as ex: LOG.debug("Failed to get the logs from the container", exc_info=ex) + @staticmethod + def _handle_data_writing(output_stream: Union[StreamWriter, io.BytesIO, io.TextIOWrapper], output_data: bytes): + if isinstance(output_stream, StreamWriter): + output_stream.write_bytes(output_data) + output_stream.flush() + + if isinstance(output_stream, io.BytesIO): + output_stream.write(output_data) + + if isinstance(output_stream, io.TextIOWrapper): + output_stream.buffer.write(output_data) + @property def network_id(self): """ diff --git a/samcli/local/docker/lambda_image.py b/samcli/local/docker/lambda_image.py index 23f0a770d9..4dff0d820f 100644 --- a/samcli/local/docker/lambda_image.py +++ b/samcli/local/docker/lambda_image.py @@ -3,6 +3,7 @@ """ import hashlib import logging +import os import platform import 
re import sys @@ -226,7 +227,7 @@ def build(self, runtime, packagetype, image, layers, architecture, stream=None, or not runtime ): stream_writer = stream or StreamWriter(sys.stderr) - stream_writer.write("Building image...") + stream_writer.write_str("Building image...") stream_writer.flush() self._build_image( image if image else base_image, rapid_image, downloaded_layers, architecture, stream=stream_writer @@ -337,15 +338,15 @@ def set_item_permission(tar_info): platform=get_docker_platform(architecture), ) for log in resp_stream: - stream_writer.write(".") + stream_writer.write_str(".") stream_writer.flush() if "error" in log: - stream_writer.write("\n") + stream_writer.write_str(os.linesep) LOG.exception("Failed to build Docker Image") raise ImageBuildException("Error building docker image: {}".format(log["error"])) - stream_writer.write("\n") + stream_writer.write_str(os.linesep) except (docker.errors.BuildError, docker.errors.APIError) as ex: - stream_writer.write("\n") + stream_writer.write_str(os.linesep) LOG.exception("Failed to build Docker Image") raise ImageBuildException("Building Image failed.") from ex finally: diff --git a/samcli/local/docker/manager.py b/samcli/local/docker/manager.py index a035003bb0..6975828cd1 100644 --- a/samcli/local/docker/manager.py +++ b/samcli/local/docker/manager.py @@ -168,16 +168,16 @@ def pull_image(self, image_name, tag=None, stream=None): raise DockerImagePullFailedException(str(ex)) from ex # io streams, especially StringIO, work only with unicode strings - stream_writer.write("\nFetching {}:{} Docker container image...".format(image_name, tag)) + stream_writer.write_str("\nFetching {}:{} Docker container image...".format(image_name, tag)) # Each line contains information on progress of the pull. Each line is a JSON string for _ in result_itr: # For every line, print a dot to show progress - stream_writer.write(".") + stream_writer.write_str(".") stream_writer.flush() # We are done. 
Go to the next line - stream_writer.write("\n") + stream_writer.write_str("\n") def has_image(self, image_name): """ diff --git a/tests/integration/local/invoke/test_integrations_cli.py b/tests/integration/local/invoke/test_integrations_cli.py index 3604fc4010..70711459d6 100644 --- a/tests/integration/local/invoke/test_integrations_cli.py +++ b/tests/integration/local/invoke/test_integrations_cli.py @@ -291,6 +291,27 @@ def test_invoke_returns_expected_result_when_no_event_given(self): self.assertEqual(process.returncode, 0) self.assertEqual("{}", process_stdout.decode("utf-8")) + @pytest.mark.flaky(reruns=3) + def test_invoke_returns_utf8(self): + command_list = InvokeIntegBase.get_command_list( + "EchoEventFunction", template_path=self.template_path, event_path=self.event_utf8_path + ) + + process = Popen(command_list, stdout=PIPE) + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + process_stdout = stdout.strip() + + with open(self.event_utf8_path) as f: + expected_output = json.dumps(json.load(f), ensure_ascii=False) + + self.assertEqual(process.returncode, 0) + self.assertEqual(expected_output, process_stdout.decode("utf-8")) + @pytest.mark.flaky(reruns=3) def test_invoke_with_env_using_parameters(self): command_list = InvokeIntegBase.get_command_list( diff --git a/tests/unit/lib/utils/test_osutils.py b/tests/unit/lib/utils/test_osutils.py index bf4794f2c4..6f7a6cf4df 100644 --- a/tests/unit/lib/utils/test_osutils.py +++ b/tests/unit/lib/utils/test_osutils.py @@ -34,9 +34,7 @@ def test_raises_on_cleanup_failure(self, rmdir_mock): @patch("os.rmdir") def test_handles_ignore_error_case(self, rmdir_mock): rmdir_mock.side_effect = OSError("fail") - dir_name = None with osutils.mkdir_temp(ignore_errors=True) as tempdir: - dir_name = tempdir self.assertTrue(os.path.exists(tempdir)) @@ -44,9 +42,6 @@ class Test_stderr(TestCase): def test_must_return_sys_stderr(self): expected_stderr = sys.stderr - if sys.version_info.major > 2: - expected_stderr = sys.stderr.buffer - self.assertEqual(expected_stderr, osutils.stderr()) @@ -54,9 +49,6 @@ class Test_stdout(TestCase): def test_must_return_sys_stdout(self): expected_stdout = sys.stdout - if sys.version_info.major > 2: - expected_stdout = sys.stdout.buffer - self.assertEqual(expected_stdout, osutils.stdout()) diff --git a/tests/unit/lib/utils/test_stream_writer.py b/tests/unit/lib/utils/test_stream_writer.py index cb48955850..a6875b59da 100644 --- a/tests/unit/lib/utils/test_stream_writer.py +++ b/tests/unit/lib/utils/test_stream_writer.py @@ -1,6 +1,7 @@ """ Tests for StreamWriter """ +import io from unittest import TestCase @@ -11,13 +12,13 @@ class TestStreamWriter(TestCase): def test_must_write_to_stream(self): - buffer = "something" + buffer = b"something" stream_mock = Mock() writer = StreamWriter(stream_mock) - writer.write(buffer) + writer.write_bytes(buffer) - stream_mock.write.assert_called_once_with(buffer) + stream_mock.buffer.write.assert_called_once_with(buffer) def test_must_flush_underlying_stream(self): stream_mock = Mock() @@ -31,7 +32,7 @@ def test_auto_flush_must_be_off_by_default(self): stream_mock = Mock() writer = StreamWriter(stream_mock) - writer.write("something") + writer.write_str("something") stream_mock.flush.assert_not_called() @@ -46,6 +47,6 @@ def test_when_auto_flush_on_flush_after_each_write(self): writer = StreamWriter(stream_mock, True) for line in lines: - writer.write(line) + writer.write_str(line) flush_mock.assert_called_once_with() 
flush_mock.reset_mock() diff --git a/tests/unit/lib/utils/test_subprocess_utils.py b/tests/unit/lib/utils/test_subprocess_utils.py index 969f06085b..a9d39afdd2 100644 --- a/tests/unit/lib/utils/test_subprocess_utils.py +++ b/tests/unit/lib/utils/test_subprocess_utils.py @@ -11,6 +11,7 @@ from parameterized import parameterized from unittest.mock import patch, Mock, call, ANY +from samcli.lib.utils.stream_writer import StreamWriter from samcli.lib.utils.subprocess_utils import ( default_loading_pattern, invoke_subprocess_with_loading_pattern, @@ -64,7 +65,7 @@ def test_loader_stream_uses_passed_in_stdout( @patch("samcli.lib.utils.subprocess_utils.Popen") def test_loader_raises_exception_non_zero_exit_code(self, patched_Popen): standard_error = "an error has occurred" - mock_stream_writer = Mock() + mock_stream_writer = Mock(spec=StreamWriter) mock_process = Mock() mock_process.returncode = 1 mock_process.stdout = None @@ -74,7 +75,7 @@ def test_loader_raises_exception_non_zero_exit_code(self, patched_Popen): with self.assertRaises(LoadingPatternError) as ex: invoke_subprocess_with_loading_pattern({"args": ["ls"]}, mock_pattern, mock_stream_writer) self.assertIn(standard_error, ex.exception.message) - mock_stream_writer.write.assert_called_once_with(os.linesep) + mock_stream_writer.write_str.assert_called_once_with(os.linesep) mock_stream_writer.flush.assert_called_once_with() @patch("samcli.lib.utils.subprocess_utils.Popen") @@ -95,19 +96,19 @@ def test_loader_raises_exception_bad_process(self, patched_Popen): @patch("samcli.lib.utils.subprocess_utils.StreamWriter") def test_default_pattern_default_stream_writer(self, patched_stream_writer): - stream_writer_mock = Mock() + stream_writer_mock = Mock(spec=StreamWriter) patched_stream_writer.return_value = stream_writer_mock default_loading_pattern(loading_pattern_rate=0.01) patched_stream_writer.assert_called_once_with(sys.stderr) - stream_writer_mock.write.assert_called_once_with(".") + stream_writer_mock.write_str.assert_called_once_with(".") stream_writer_mock.flush.assert_called_once_with() @patch("samcli.lib.utils.subprocess_utils.StreamWriter") def test_default_pattern(self, patched_stream_writer): - stream_writer_mock = Mock() + stream_writer_mock = Mock(spec=StreamWriter) default_loading_pattern(stream_writer_mock, 0.01) patched_stream_writer.assert_not_called() - stream_writer_mock.write.assert_called_once_with(".") + stream_writer_mock.write_str.assert_called_once_with(".") stream_writer_mock.flush.assert_called_once_with() @parameterized.expand([("hello".encode("utf-8"), "hello"), ("hello", "hello")]) diff --git a/tests/unit/local/docker/test_container.py b/tests/unit/local/docker/test_container.py index 14f292c0ce..c3a5671ea3 100644 --- a/tests/unit/local/docker/test_container.py +++ b/tests/unit/local/docker/test_container.py @@ -9,6 +9,7 @@ from requests import RequestException from samcli.lib.utils.packagetype import IMAGE +from samcli.lib.utils.stream_writer import StreamWriter from samcli.local.docker.container import Container, ContainerResponseException, ContainerConnectionTimeoutException @@ -696,17 +697,17 @@ def test_wait_for_result_waits_for_socket_before_post_request(self, patched_time self.assertEqual(mock_requests.post.call_count, 0) def test_write_container_output_successful(self): - stdout_mock = Mock() - stderr_mock = Mock() + stdout_mock = Mock(spec=StreamWriter) + stderr_mock = Mock(spec=StreamWriter) def _output_iterator(): - yield "Hello", None - yield None, "World" + yield b"Hello", None + yield None, 
b"World" raise ValueError("The pipe has been ended.") Container._write_container_output(_output_iterator(), stdout_mock, stderr_mock) - stdout_mock.assert_has_calls([call.write("Hello")]) - stderr_mock.assert_has_calls([call.write("World")]) + stdout_mock.assert_has_calls([call.write_bytes(b"Hello")]) + stderr_mock.assert_has_calls([call.write_bytes(b"World")]) class TestContainer_wait_for_logs(TestCase): @@ -760,33 +761,33 @@ class TestContainer_write_container_output(TestCase): def setUp(self): self.output_itr = [(b"stdout1", None), (None, b"stderr1"), (b"stdout2", b"stderr2"), (None, None)] - self.stdout_mock = Mock() - self.stderr_mock = Mock() + self.stdout_mock = Mock(spec=StreamWriter) + self.stderr_mock = Mock(spec=StreamWriter) def test_must_write_stdout_and_stderr_data(self): # All the invalid frames must be ignored Container._write_container_output(self.output_itr, stdout=self.stdout_mock, stderr=self.stderr_mock) - self.stdout_mock.write.assert_has_calls([call(b"stdout1"), call(b"stdout2")]) + self.stdout_mock.write_bytes.assert_has_calls([call(b"stdout1"), call(b"stdout2")]) - self.stderr_mock.write.assert_has_calls([call(b"stderr1"), call(b"stderr2")]) + self.stderr_mock.write_bytes.assert_has_calls([call(b"stderr1"), call(b"stderr2")]) def test_must_write_only_stderr(self): # All the invalid frames must be ignored Container._write_container_output(self.output_itr, stdout=None, stderr=self.stderr_mock) - self.stdout_mock.write.assert_not_called() + self.stdout_mock.write_bytes.assert_not_called() - self.stderr_mock.write.assert_has_calls([call(b"stderr1"), call(b"stderr2")]) + self.stderr_mock.write_bytes.assert_has_calls([call(b"stderr1"), call(b"stderr2")]) def test_must_write_only_stdout(self): Container._write_container_output(self.output_itr, stdout=self.stdout_mock, stderr=None) - self.stdout_mock.write.assert_has_calls([call(b"stdout1"), call(b"stdout2")]) + self.stdout_mock.write_bytes.assert_has_calls([call(b"stdout1"), call(b"stdout2")]) - self.stderr_mock.write.assert_not_called() # stderr must never be called + self.stderr_mock.write_bytes.assert_not_called() # stderr must never be called class TestContainer_wait_for_socket_connection(TestCase): diff --git a/tests/unit/local/docker/test_lambda_image.py b/tests/unit/local/docker/test_lambda_image.py index 1e8f936d98..03b57be804 100644 --- a/tests/unit/local/docker/test_lambda_image.py +++ b/tests/unit/local/docker/test_lambda_image.py @@ -1,4 +1,3 @@ -import io import tempfile from unittest import TestCase @@ -271,7 +270,7 @@ def test_force_building_image_that_doesnt_already_exists( docker_client_mock.images.get.side_effect = ImageNotFound("image not found") docker_client_mock.images.list.return_value = [] - stream = io.StringIO() + stream = Mock() lambda_image = LambdaImage(layer_downloader_mock, False, True, docker_client=docker_client_mock) actual_image_id = lambda_image.build( @@ -311,7 +310,7 @@ def test_force_building_image_on_daemon_404( docker_client_mock.images.get.side_effect = NotFound("image not found") docker_client_mock.images.list.return_value = [] - stream = io.StringIO() + stream = Mock() lambda_image = LambdaImage(layer_downloader_mock, False, True, docker_client=docker_client_mock) actual_image_id = lambda_image.build( @@ -351,7 +350,7 @@ def test_docker_distribution_api_error_on_daemon_api_error( docker_client_mock.images.get.side_effect = APIError("error from docker daemon") docker_client_mock.images.list.return_value = [] - stream = io.StringIO() + stream = Mock() lambda_image = 
LambdaImage(layer_downloader_mock, False, True, docker_client=docker_client_mock) with self.assertRaises(DockerDistributionAPIError): @@ -377,7 +376,7 @@ def test_not_force_building_image_that_doesnt_already_exists( docker_client_mock.images.get.side_effect = ImageNotFound("image not found") docker_client_mock.images.list.return_value = [] - stream = io.StringIO() + stream = Mock() lambda_image = LambdaImage(layer_downloader_mock, False, False, docker_client=docker_client_mock) actual_image_id = lambda_image.build( diff --git a/tests/unit/local/docker/test_manager.py b/tests/unit/local/docker/test_manager.py index ada69903ea..4cb42bbd02 100644 --- a/tests/unit/local/docker/test_manager.py +++ b/tests/unit/local/docker/test_manager.py @@ -1,8 +1,6 @@ """ Tests container manager """ - -import io import importlib from unittest import TestCase from unittest.mock import Mock, patch, MagicMock, ANY, call @@ -218,17 +216,29 @@ def setUp(self): self.manager = ContainerManager(docker_client=self.mock_docker_client) def test_must_pull_and_print_progress_dots(self): - stream = io.StringIO() + stream = Mock() pull_result = [1, 2, 3, 4, 5, 6, 7, 8, 9, 0] self.mock_docker_client.api.pull.return_value = pull_result - expected_stream_output = "\nFetching {}:latest Docker container image...{}\n".format( - self.image_name, "." * len(pull_result) # Progress bar will print one dot per response from pull API - ) + expected_stream_calls = [ + call(f"\nFetching {self.image_name}:latest Docker container image..."), + call("."), + call("."), + call("."), + call("."), + call("."), + call("."), + call("."), + call("."), + call("."), + call("."), + call("\n"), + ] self.manager.pull_image(self.image_name, stream=stream) self.mock_docker_client.api.pull.assert_called_with(self.image_name, stream=True, decode=True, tag="latest") - self.assertEqual(stream.getvalue(), expected_stream_output) + + stream.write_str.assert_has_calls(expected_stream_calls) def test_must_raise_if_image_not_found(self): msg = "some error" From 7efaac1e966c8d06e4d10a12355955aee7991827 Mon Sep 17 00:00:00 2001 From: hnnasit <84355507+hnnasit@users.noreply.github.com> Date: Tue, 11 Jul 2023 09:35:10 -0700 Subject: [PATCH 14/32] feat: Add remote invoke implementation for step functions (#5458) * Added remote invoke implementation for step functions * Added type hints and changed datetime conversion * Add unit tests * Updated logic for supported services * Removed step functions from supported services * Updated method doc string * Address comments * Add a unit test for checking supported executors * Remove Any typing and replace with specific type --- samcli/commands/remote/exceptions.py | 2 +- .../commands/remote/remote_invoke_context.py | 10 +- .../remote_invoke/lambda_invoke_executors.py | 16 +- .../remote_invoke_executor_factory.py | 74 +++++++- .../stepfunctions_invoke_executors.py | 154 ++++++++++++++++ .../remote/test_remote_invoke_context.py | 5 +- .../test_remote_invoke_executor_factory.py | 63 ++++++- .../test_stepfunctions_invoke_executors.py | 165 ++++++++++++++++++ 8 files changed, 463 insertions(+), 26 deletions(-) create mode 100644 samcli/lib/remote_invoke/stepfunctions_invoke_executors.py create mode 100644 tests/unit/lib/remote_invoke/test_stepfunctions_invoke_executors.py diff --git a/samcli/commands/remote/exceptions.py b/samcli/commands/remote/exceptions.py index 68b0d5983f..e2a5c66b98 100644 --- a/samcli/commands/remote/exceptions.py +++ b/samcli/commands/remote/exceptions.py @@ -20,7 +20,7 @@ class 
UnsupportedServiceForRemoteInvoke(UserException): pass -class NoExecutorFoundForRemoteInvoke(UserException): +class ResourceNotSupportedForRemoteInvoke(UserException): pass diff --git a/samcli/commands/remote/remote_invoke_context.py b/samcli/commands/remote/remote_invoke_context.py index 93663d92fa..d1294983bc 100644 --- a/samcli/commands/remote/remote_invoke_context.py +++ b/samcli/commands/remote/remote_invoke_context.py @@ -11,8 +11,8 @@ AmbiguousResourceForRemoteInvoke, InvalidRemoteInvokeParameters, InvalidStackNameProvidedForRemoteInvoke, - NoExecutorFoundForRemoteInvoke, NoResourceFoundForRemoteInvoke, + ResourceNotSupportedForRemoteInvoke, UnsupportedServiceForRemoteInvoke, ) from samcli.lib.remote_invoke.remote_invoke_executor_factory import RemoteInvokeExecutorFactory @@ -70,8 +70,8 @@ def __exit__(self, *args) -> None: def run(self, remote_invoke_input: RemoteInvokeExecutionInfo) -> None: """ Instantiates remote invoke executor with populated resource summary information, executes it with the provided - input & returns its response back to the caller. If no executor can be instantiated it raises - NoExecutorFoundForRemoteInvoke exception. + input & returns its response back to the caller. If resource is not supported by command, raises + ResourceNotSupportedForRemoteInvoke exception. Parameters ---------- @@ -93,8 +93,8 @@ def run(self, remote_invoke_input: RemoteInvokeExecutionInfo) -> None: DefaultRemoteInvokeLogConsumer(self.stderr), ) if not remote_invoke_executor: - raise NoExecutorFoundForRemoteInvoke( - f"Resource type {self._resource_summary.resource_type} is not supported for remote invoke" + raise ResourceNotSupportedForRemoteInvoke( + f"Resource type {self._resource_summary.resource_type} is not supported for remote invoke." ) remote_invoke_executor.execute(remote_invoke_input) diff --git a/samcli/lib/remote_invoke/lambda_invoke_executors.py b/samcli/lib/remote_invoke/lambda_invoke_executors.py index 323aeceba3..911bdb0a96 100644 --- a/samcli/lib/remote_invoke/lambda_invoke_executors.py +++ b/samcli/lib/remote_invoke/lambda_invoke_executors.py @@ -6,11 +6,12 @@ import logging from abc import ABC, abstractmethod from json import JSONDecodeError -from typing import Any, cast +from typing import cast from botocore.eventstream import EventStream from botocore.exceptions import ClientError, ParamValidationError from botocore.response import StreamingBody +from mypy_boto3_lambda.client import LambdaClient from samcli.lib.remote_invoke.exceptions import ( ErrorBotoApiCallException, @@ -46,11 +47,11 @@ class AbstractLambdaInvokeExecutor(BotoActionExecutor, ABC): For Payload parameter, if a file location provided, the file handle will be passed as Payload object """ - _lambda_client: Any + _lambda_client: LambdaClient _function_name: str _remote_output_format: RemoteInvokeOutputFormat - def __init__(self, lambda_client: Any, function_name: str, remote_output_format: RemoteInvokeOutputFormat): + def __init__(self, lambda_client: LambdaClient, function_name: str, remote_output_format: RemoteInvokeOutputFormat): self._lambda_client = lambda_client self._function_name = function_name self._remote_output_format = remote_output_format @@ -60,7 +61,10 @@ def validate_action_parameters(self, parameters: dict) -> None: """ Validates the input boto parameters and prepares the parameters for calling the API. 
- :param parameters: Boto parameters provided as input + Parameters + ---------- + parameters: dict + Boto parameters provided as input """ for parameter_key, parameter_value in parameters.items(): if parameter_key == FUNCTION_NAME: @@ -82,7 +86,7 @@ def _execute_boto_call(self, boto_client_method) -> dict: except ParamValidationError as param_val_ex: raise InvalidResourceBotoParameterException( f"Invalid parameter key provided." - f" {str(param_val_ex).replace('{FUNCTION_NAME}, ', '').replace('{PAYLOAD}, ', '')}" + f" {str(param_val_ex).replace(f'{FUNCTION_NAME}, ', '').replace(f'{PAYLOAD}, ', '')}" ) from param_val_ex except ClientError as client_ex: if boto_utils.get_client_error_code(client_ex) == "ValidationException": @@ -215,7 +219,7 @@ def map(self, remote_invoke_input: RemoteInvokeResponse) -> RemoteInvokeResponse return remote_invoke_input -def _is_function_invoke_mode_response_stream(lambda_client: Any, function_name: str): +def _is_function_invoke_mode_response_stream(lambda_client: LambdaClient, function_name: str): """ Returns True if given function has RESPONSE_STREAM as InvokeMode, False otherwise """ diff --git a/samcli/lib/remote_invoke/remote_invoke_executor_factory.py b/samcli/lib/remote_invoke/remote_invoke_executor_factory.py index 19bf7ff106..14c93bb7bc 100644 --- a/samcli/lib/remote_invoke/remote_invoke_executor_factory.py +++ b/samcli/lib/remote_invoke/remote_invoke_executor_factory.py @@ -20,10 +20,12 @@ RemoteInvokeResponse, ResponseObjectToJsonStringMapper, ) -from samcli.lib.utils.cloudformation import CloudFormationResourceSummary -from samcli.lib.utils.resources import ( - AWS_LAMBDA_FUNCTION, +from samcli.lib.remote_invoke.stepfunctions_invoke_executors import ( + SfnDescribeExecutionResponseConverter, + StepFunctionsStartExecutionExecutor, ) +from samcli.lib.utils.cloudformation import CloudFormationResourceSummary +from samcli.lib.utils.resources import AWS_LAMBDA_FUNCTION LOG = logging.getLogger(__name__) @@ -85,11 +87,23 @@ def _create_lambda_boto_executor( """Creates a remote invoke executor for Lambda resource type based on the boto action being called. 
- :param cfn_resource_summary: Information about the Lambda resource + Parameters + ---------- + cfn_resource_summary: CloudFormationResourceSummary + Information about the Lambda resource + remote_invoke_output_format: RemoteInvokeOutputFormat + Response output format that will be used for remote invoke execution + response_consumer: RemoteInvokeConsumer[RemoteInvokeResponse] + Consumer instance which can process RemoteInvokeResponse events + log_consumer: RemoteInvokeConsumer[RemoteInvokeLogOutput] + Consumer instance which can process RemoteInvokeLogOutput events - :return: Returns the created remote invoke Executor + Returns + ------- + RemoteInvokeExecutor + Returns the Executor created for Lambda """ - LOG.info(f"Invoking Lambda Function {cfn_resource_summary.logical_resource_id}") + LOG.info("Invoking Lambda Function %s", cfn_resource_summary.logical_resource_id) lambda_client = self._boto_client_provider("lambda") mappers = [] if _is_function_invoke_mode_response_stream(lambda_client, cfn_resource_summary.physical_resource_id): @@ -127,6 +141,50 @@ def _create_lambda_boto_executor( log_consumer=log_consumer, ) + def _create_stepfunctions_boto_executor( + self, + cfn_resource_summary: CloudFormationResourceSummary, + remote_invoke_output_format: RemoteInvokeOutputFormat, + response_consumer: RemoteInvokeConsumer[RemoteInvokeResponse], + log_consumer: RemoteInvokeConsumer[RemoteInvokeLogOutput], + ) -> RemoteInvokeExecutor: + """Creates a remote invoke executor for Step Functions resource type based on + the boto action being called. + + Parameters + ---------- + cfn_resource_summary: CloudFormationResourceSummary + Information about the Step Function resource + remote_invoke_output_format: RemoteInvokeOutputFormat + Response output format that will be used for remote invoke execution + response_consumer: RemoteInvokeConsumer[RemoteInvokeResponse] + Consumer instance which can process RemoteInvokeResponse events + log_consumer: RemoteInvokeConsumer[RemoteInvokeLogOutput] + Consumer instance which can process RemoteInvokeLogOutput events + + Returns + ------- + RemoteInvokeExecutor + Returns the Executor created for Step Functions + """ + LOG.info("Invoking Step Function %s", cfn_resource_summary.logical_resource_id) + sfn_client = self._boto_client_provider("stepfunctions") + mappers = [] + if remote_invoke_output_format == RemoteInvokeOutputFormat.JSON: + mappers = [ + SfnDescribeExecutionResponseConverter(), + ResponseObjectToJsonStringMapper(), + ] + return RemoteInvokeExecutor( + request_mappers=[DefaultConvertToJSON()], + response_mappers=mappers, + boto_action_executor=StepFunctionsStartExecutionExecutor( + sfn_client, cfn_resource_summary.physical_resource_id, remote_invoke_output_format + ), + response_consumer=response_consumer, + log_consumer=log_consumer, + ) + # mapping definition for each supported resource type REMOTE_INVOKE_EXECUTOR_MAPPING: Dict[ str, @@ -140,6 +198,4 @@ def _create_lambda_boto_executor( ], RemoteInvokeExecutor, ], - ] = { - AWS_LAMBDA_FUNCTION: _create_lambda_boto_executor, - } + ] = {AWS_LAMBDA_FUNCTION: _create_lambda_boto_executor} diff --git a/samcli/lib/remote_invoke/stepfunctions_invoke_executors.py b/samcli/lib/remote_invoke/stepfunctions_invoke_executors.py new file mode 100644 index 0000000000..df8c9b2b4e --- /dev/null +++ b/samcli/lib/remote_invoke/stepfunctions_invoke_executors.py @@ -0,0 +1,154 @@ +""" +Remote invoke executor implementation for Step Functions +""" +import logging +import time +from datetime import datetime +from typing 
import cast + +from botocore.exceptions import ClientError, ParamValidationError +from mypy_boto3_stepfunctions import SFNClient + +from samcli.lib.remote_invoke.exceptions import ( + ErrorBotoApiCallException, + InvalideBotoResponseException, + InvalidResourceBotoParameterException, +) +from samcli.lib.remote_invoke.remote_invoke_executors import ( + BotoActionExecutor, + RemoteInvokeIterableResponseType, + RemoteInvokeLogOutput, + RemoteInvokeOutputFormat, + RemoteInvokeRequestResponseMapper, + RemoteInvokeResponse, +) + +LOG = logging.getLogger(__name__) +STATE_MACHINE_ARN = "stateMachineArn" +INPUT = "input" +RUNNING = "RUNNING" +SFN_EXECUTION_WAIT_TIME = 2 + + +class StepFunctionsStartExecutionExecutor(BotoActionExecutor): + """ + Calls "start_execution" method of "Step Functions" service with given input. + If a file location provided, the file handle will be passed as input object. + Calls "describe_execution" method after the executions starts to get more + execution details. + """ + + _stepfunctions_client: SFNClient + _state_machine_arn: str + _remote_output_format: RemoteInvokeOutputFormat + request_parameters: dict + + def __init__( + self, stepfunctions_client: SFNClient, physical_id: str, remote_output_format: RemoteInvokeOutputFormat + ): + self._stepfunctions_client = stepfunctions_client + self._remote_output_format = remote_output_format + self._state_machine_arn = physical_id + self.request_parameters = {} + + def validate_action_parameters(self, parameters: dict) -> None: + """ + Validates the input boto parameters and prepares the parameters for calling the API. + + Parameters + ---------- + parameters: dict + Boto parameters provided as input + """ + for parameter_key, parameter_value in parameters.items(): + if parameter_key == "stateMachineArn": + LOG.warning("stateMachineArn is defined using the value provided for resource_id argument.") + elif parameter_key == "input": + LOG.warning("input is defined using the value provided for either --event or --event-file options.") + else: + self.request_parameters[parameter_key] = parameter_value + + if not self.request_parameters.get("name"): + current_datetime = datetime.now().strftime("%Y%m%dT%H%M%S") + self.request_parameters["name"] = f"sam_remote_invoke_{current_datetime}" + + def _execute_action(self, payload: str) -> RemoteInvokeIterableResponseType: + """ + Calls "start_execution" method to start the execution and waits + for the execution to complete using the "describe_execution" method + + Parameters + ---------- + payload: str + The input which is passed to the execution + + Yields + ------ + RemoteInvokeIterableResponseType + Response that is consumed by remote invoke consumers after execution + """ + self.request_parameters[INPUT] = payload + self.request_parameters[STATE_MACHINE_ARN] = self._state_machine_arn + LOG.debug( + "Calling stepfunctions_client.start_execution with name:%s, input:%s, stateMachineArn:%s", + self.request_parameters["name"], + payload, + self._state_machine_arn, + ) + try: + start_execution_response = self._stepfunctions_client.start_execution(**self.request_parameters) + execution_arn = start_execution_response["executionArn"] + + execution_status = RUNNING + while execution_status == RUNNING: + describe_execution_response = cast( + dict, self._stepfunctions_client.describe_execution(executionArn=execution_arn) + ) + execution_status = describe_execution_response["status"] + LOG.debug("ExecutionArn: %s, status: %s", execution_arn, execution_status) + # Sleep to avoid throttling the API 
for longer executions + time.sleep(SFN_EXECUTION_WAIT_TIME) + + if self._remote_output_format == RemoteInvokeOutputFormat.JSON: + yield RemoteInvokeResponse(describe_execution_response) + if self._remote_output_format == RemoteInvokeOutputFormat.TEXT: + output_data = describe_execution_response.get("output", "") + error_data = describe_execution_response.get("error", "") + if output_data: + yield RemoteInvokeResponse(output_data) + return + if error_data: + error_cause = describe_execution_response.get("cause", "") + yield RemoteInvokeLogOutput( + f"The execution failed due to the error: {error_data} and cause: {error_cause}" + ) + return + except ParamValidationError as param_val_ex: + raise InvalidResourceBotoParameterException( + f"Invalid parameter key provided." + f" {str(param_val_ex).replace(f'{STATE_MACHINE_ARN}, ', '').replace(f'{INPUT}, ', '')}" + ) + except ClientError as client_ex: + raise ErrorBotoApiCallException(client_ex) from client_ex + + +class SfnDescribeExecutionResponseConverter(RemoteInvokeRequestResponseMapper[RemoteInvokeResponse]): + """ + This class helps to convert response from Step Function service. + This class converts any datetime objects in the response into strings + """ + + def map(self, remote_invoke_input: RemoteInvokeResponse) -> RemoteInvokeResponse: + LOG.debug("Mapping Step Function execution response to string object") + if not isinstance(remote_invoke_input.response, dict): + raise InvalideBotoResponseException( + "Invalid response type received from Step Functions service, expecting dict" + ) + + start_date_field = remote_invoke_input.response.get("startDate") + stop_date_field = remote_invoke_input.response.get("stopDate") + if start_date_field: + remote_invoke_input.response["startDate"] = start_date_field.strftime("%Y-%m-%d %H:%M:%S.%f%z") + if stop_date_field: + remote_invoke_input.response["stopDate"] = stop_date_field.strftime("%Y-%m-%d %H:%M:%S.%f%z") + return remote_invoke_input diff --git a/tests/unit/commands/remote/test_remote_invoke_context.py b/tests/unit/commands/remote/test_remote_invoke_context.py index e01d9de5fb..efae5b68b8 100644 --- a/tests/unit/commands/remote/test_remote_invoke_context.py +++ b/tests/unit/commands/remote/test_remote_invoke_context.py @@ -7,7 +7,7 @@ AmbiguousResourceForRemoteInvoke, NoResourceFoundForRemoteInvoke, UnsupportedServiceForRemoteInvoke, - NoExecutorFoundForRemoteInvoke, + ResourceNotSupportedForRemoteInvoke, InvalidStackNameProvidedForRemoteInvoke, ) from samcli.commands.remote.remote_invoke_context import RemoteInvokeContext, SUPPORTED_SERVICES @@ -118,7 +118,7 @@ def test_running_without_resource_summary_should_raise_exception(self, patched_g def test_running_with_unsupported_resource_should_raise_exception(self, patched_get_resource_summary): patched_get_resource_summary.return_value = Mock(resource_type="UnSupportedResource") with self._get_remote_invoke_context() as remote_invoke_context: - with self.assertRaises(NoExecutorFoundForRemoteInvoke): + with self.assertRaises(ResourceNotSupportedForRemoteInvoke): remote_invoke_context.run(Mock()) @patch("samcli.commands.remote.remote_invoke_context.RemoteInvokeExecutorFactory") @@ -126,6 +126,7 @@ def test_running_with_unsupported_resource_should_raise_exception(self, patched_ def test_running_should_execute_remote_invoke_executor_instance( self, patched_get_resource_summary, patched_remote_invoke_executor_factory ): + patched_get_resource_summary.return_value = Mock(resource_type=SUPPORTED_SERVICES["lambda"]) 
mocked_remote_invoke_executor_factory = Mock() patched_remote_invoke_executor_factory.return_value = mocked_remote_invoke_executor_factory mocked_remote_invoke_executor = Mock() diff --git a/tests/unit/lib/remote_invoke/test_remote_invoke_executor_factory.py b/tests/unit/lib/remote_invoke/test_remote_invoke_executor_factory.py index 57b5e7988c..8f76be8303 100644 --- a/tests/unit/lib/remote_invoke/test_remote_invoke_executor_factory.py +++ b/tests/unit/lib/remote_invoke/test_remote_invoke_executor_factory.py @@ -4,9 +4,7 @@ from parameterized import parameterized -from samcli.lib.remote_invoke.remote_invoke_executor_factory import ( - RemoteInvokeExecutorFactory, -) +from samcli.lib.remote_invoke.remote_invoke_executor_factory import RemoteInvokeExecutorFactory, AWS_LAMBDA_FUNCTION from samcli.lib.remote_invoke.remote_invoke_executors import RemoteInvokeOutputFormat @@ -15,6 +13,12 @@ def setUp(self) -> None: self.boto_client_provider_mock = Mock() self.remote_invoke_executor_factory = RemoteInvokeExecutorFactory(self.boto_client_provider_mock) + def test_supported_resource_executors(self): + supported_executors = self.remote_invoke_executor_factory.REMOTE_INVOKE_EXECUTOR_MAPPING + self.assertEqual(1, len(supported_executors)) + expected_executors = {AWS_LAMBDA_FUNCTION} + self.assertEqual(expected_executors, set(supported_executors.keys())) + @patch( "samcli.lib.remote_invoke.remote_invoke_executor_factory.RemoteInvokeExecutorFactory.REMOTE_INVOKE_EXECUTOR_MAPPING" ) @@ -132,3 +136,56 @@ def test_create_lambda_test_executor( response_consumer=given_response_consumer, log_consumer=given_log_consumer, ) + + @parameterized.expand(itertools.product([RemoteInvokeOutputFormat.JSON, RemoteInvokeOutputFormat.TEXT])) + @patch("samcli.lib.remote_invoke.remote_invoke_executor_factory.StepFunctionsStartExecutionExecutor") + @patch("samcli.lib.remote_invoke.remote_invoke_executor_factory.SfnDescribeExecutionResponseConverter") + @patch("samcli.lib.remote_invoke.remote_invoke_executor_factory.DefaultConvertToJSON") + @patch("samcli.lib.remote_invoke.remote_invoke_executor_factory.ResponseObjectToJsonStringMapper") + @patch("samcli.lib.remote_invoke.remote_invoke_executor_factory.RemoteInvokeExecutor") + def test_create_stepfunctions_test_executor( + self, + remote_invoke_output_format, + patched_remote_invoke_executor, + patched_object_to_json_converter, + patched_convert_to_default_json, + patched_response_converter, + patched_stepfunctions_invoke_executor, + ): + given_physical_resource_id = "physical_resource_id" + given_cfn_resource_summary = Mock(physical_resource_id=given_physical_resource_id) + + given_stepfunctions_client = Mock() + self.boto_client_provider_mock.return_value = given_stepfunctions_client + + given_remote_invoke_executor = Mock() + patched_remote_invoke_executor.return_value = given_remote_invoke_executor + + given_response_consumer = Mock() + given_log_consumer = Mock() + stepfunctions_executor = self.remote_invoke_executor_factory._create_stepfunctions_boto_executor( + given_cfn_resource_summary, remote_invoke_output_format, given_response_consumer, given_log_consumer + ) + + self.assertEqual(stepfunctions_executor, given_remote_invoke_executor) + self.boto_client_provider_mock.assert_called_with("stepfunctions") + patched_convert_to_default_json.assert_called_once() + + expected_mappers = [] + if remote_invoke_output_format == RemoteInvokeOutputFormat.JSON: + patched_object_to_json_converter.assert_called_once() + patched_response_converter.assert_called_once() + 
patched_stepfunctions_invoke_executor.assert_called_with( + given_stepfunctions_client, given_physical_resource_id, remote_invoke_output_format + ) + expected_mappers = [ + patched_response_converter(), + patched_object_to_json_converter(), + ] + patched_remote_invoke_executor.assert_called_with( + request_mappers=[patched_convert_to_default_json()], + response_mappers=expected_mappers, + boto_action_executor=patched_stepfunctions_invoke_executor(), + response_consumer=given_response_consumer, + log_consumer=given_log_consumer, + ) diff --git a/tests/unit/lib/remote_invoke/test_stepfunctions_invoke_executors.py b/tests/unit/lib/remote_invoke/test_stepfunctions_invoke_executors.py new file mode 100644 index 0000000000..adac0f00e8 --- /dev/null +++ b/tests/unit/lib/remote_invoke/test_stepfunctions_invoke_executors.py @@ -0,0 +1,165 @@ +from unittest import TestCase +from unittest.mock import patch, Mock + +from parameterized import parameterized, parameterized_class +from samcli.lib.remote_invoke.stepfunctions_invoke_executors import ( + SfnDescribeExecutionResponseConverter, + RemoteInvokeOutputFormat, + InvalideBotoResponseException, + StepFunctionsStartExecutionExecutor, + ParamValidationError, + InvalidResourceBotoParameterException, + ErrorBotoApiCallException, + ClientError, + RemoteInvokeLogOutput, +) +from samcli.lib.remote_invoke.remote_invoke_executors import RemoteInvokeExecutionInfo, RemoteInvokeResponse +from datetime import datetime + + +@parameterized_class( + "output", + [[RemoteInvokeOutputFormat.TEXT], [RemoteInvokeOutputFormat.JSON]], +) +class TestStepFunctionsStartExecutionExecutor(TestCase): + output: RemoteInvokeOutputFormat + + def setUp(self) -> None: + self.stepfunctions_client = Mock() + self.state_machine_arn = Mock() + self.stepfunctions_invoke_executor = StepFunctionsStartExecutionExecutor( + self.stepfunctions_client, self.state_machine_arn, self.output + ) + + @patch("samcli.lib.remote_invoke.stepfunctions_invoke_executors.time") + def test_execute_action_successful(self, patched_time): + patched_time.sleep = Mock() + mock_exec_name = "mock_execution_name" + mock_exec_arn = "MockArn" + given_input = '{"input_key": "value"}' + mock_response = { + "executionArn": mock_exec_arn, + "status": "SUCCEEDED", + "output": '{"output_key": "mock_output"}', + } + self.stepfunctions_client.start_execution.return_value = {"executionArn": mock_exec_arn} + self.stepfunctions_client.describe_execution.side_effect = [ + {"executionArn": mock_exec_arn, "status": "RUNNING"}, + mock_response, + ] + self.stepfunctions_invoke_executor.validate_action_parameters({"name": mock_exec_name}) + result = self.stepfunctions_invoke_executor._execute_action(given_input) + + if self.output == RemoteInvokeOutputFormat.JSON: + self.assertEqual(list(result), [RemoteInvokeResponse(mock_response)]) + else: + self.assertEqual(list(result), [RemoteInvokeResponse(mock_response["output"])]) + + self.stepfunctions_client.start_execution.assert_called_with( + stateMachineArn=self.state_machine_arn, input=given_input, name=mock_exec_name + ) + self.stepfunctions_client.describe_execution.assert_called() + + @patch("samcli.lib.remote_invoke.stepfunctions_invoke_executors.time") + def test_execute_action_not_successful(self, patched_time): + patched_time.sleep = Mock() + mock_exec_name = "mock_execution_name" + mock_exec_arn = "MockArn" + mock_error = "MockError" + mock_cause = "Execution failed due to mock error" + given_input = '{"input_key": "value"}' + mock_response = {"executionArn": mock_exec_arn, 
"status": "FAILED", "error": mock_error, "cause": mock_cause} + self.stepfunctions_client.start_execution.return_value = {"executionArn": mock_exec_arn} + self.stepfunctions_client.describe_execution.side_effect = [ + {"executionArn": mock_exec_arn, "status": "RUNNING"}, + mock_response, + ] + self.stepfunctions_invoke_executor.validate_action_parameters({"name": mock_exec_name}) + result = self.stepfunctions_invoke_executor._execute_action(given_input) + + expected_response = f"The execution failed due to the error: {mock_error} and cause: {mock_cause}" + if self.output == RemoteInvokeOutputFormat.JSON: + self.assertEqual(list(result), [RemoteInvokeResponse(mock_response)]) + else: + self.assertEqual(list(result), [RemoteInvokeLogOutput(expected_response)]) + + @parameterized.expand( + [ + ({}, {"name": "sam_remote_invoke_20230710T072625"}), + ({"name": "custom_execution_name"}, {"name": "custom_execution_name"}), + ( + {"traceHeader": "Mock X-Ray trace header"}, + {"traceHeader": "Mock X-Ray trace header", "name": "sam_remote_invoke_20230710T072625"}, + ), + ( + {"stateMachineArn": "ParameterProvidedArn", "input": "ParameterProvidedInput"}, + {"name": "sam_remote_invoke_20230710T072625"}, + ), + ( + {"invalidParameterKey": "invalidParameterValue"}, + {"invalidParameterKey": "invalidParameterValue", "name": "sam_remote_invoke_20230710T072625"}, + ), + ] + ) + @patch("samcli.lib.remote_invoke.stepfunctions_invoke_executors.datetime") + def test_validate_action_parameters(self, parameters, expected_boto_parameters, patched_datetime): + patched_datetime.now.return_value = datetime(2023, 7, 10, 7, 26, 25) + self.stepfunctions_invoke_executor.validate_action_parameters(parameters) + self.assertEqual(self.stepfunctions_invoke_executor.request_parameters, expected_boto_parameters) + + def test_execute_action_invalid_parameter_key_throws_parameter_validation_exception(self): + given_input = "input" + error = ParamValidationError(report="Invalid parameters") + self.stepfunctions_client.start_execution.side_effect = error + with self.assertRaises(InvalidResourceBotoParameterException): + self.stepfunctions_invoke_executor.validate_action_parameters({}) + for _ in self.stepfunctions_invoke_executor._execute_action(given_input): + pass + + def test_execute_action_throws_client_error_exception(self): + given_input = "input" + error = ClientError(error_response={"Error": {"Code": "MockException"}}, operation_name="invoke") + self.stepfunctions_client.start_execution.side_effect = error + with self.assertRaises(ErrorBotoApiCallException): + self.stepfunctions_invoke_executor.validate_action_parameters({}) + for _ in self.stepfunctions_invoke_executor._execute_action(given_input): + pass + + +class TestSfnDescribeExecutionResponseConverter(TestCase): + def setUp(self) -> None: + self.sfn_response_converter = SfnDescribeExecutionResponseConverter() + + def test_stepfunctions_response_conversion(self): + output_format = RemoteInvokeOutputFormat.JSON + given_output_string = "output string" + execution_date = datetime(2022, 12, 25, 00, 00, 00) + given_execution_result = { + "output": given_output_string, + "startDate": execution_date, + "stopDate": execution_date, + } + remote_invoke_execution_info = RemoteInvokeExecutionInfo(None, None, {}, output_format) + remote_invoke_execution_info.response = given_execution_result + + expected_result = { + "output": given_output_string, + "startDate": "2022-12-25 00:00:00.000000", + "stopDate": "2022-12-25 00:00:00.000000", + } + + result = 
self.sfn_response_converter.map(remote_invoke_execution_info) + + self.assertEqual(result.response, expected_result) + + def test_stepfunctions_invalid_response_exception(self): + output_format = RemoteInvokeOutputFormat.JSON + given_output_response = Mock() + given_output_string = "output string" + given_output_response.read().decode.return_value = given_output_string + given_test_result = [given_output_response] + remote_invoke_execution_info = RemoteInvokeExecutionInfo(None, None, {}, output_format) + remote_invoke_execution_info.response = given_test_result + + with self.assertRaises(InvalideBotoResponseException): + self.sfn_response_converter.map(remote_invoke_execution_info) From f508c570880d67d392f895e6d9345b3a65f0328a Mon Sep 17 00:00:00 2001 From: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Date: Tue, 11 Jul 2023 12:55:12 -0700 Subject: [PATCH 15/32] fix: use images.build rather than low level build API to catch errors (#5399) * fix: use container.build rather than low level build API to catch errors * use specific exception type rather than catching all * fix unit tests * add unit tests * add integration tests * fix messaging * Swap params --- samcli/lib/build/app_builder.py | 8 +++-- tests/integration/buildcmd/test_build_cmd.py | 34 +++++++++++++++++++ .../buildcmd/PythonImage/InvalidDockerfile | 16 +++++++++ .../unit/lib/build_module/test_app_builder.py | 28 +++++++++------ 4 files changed, 73 insertions(+), 13 deletions(-) create mode 100644 tests/integration/testdata/buildcmd/PythonImage/InvalidDockerfile diff --git a/samcli/lib/build/app_builder.py b/samcli/lib/build/app_builder.py index 50ddbd74e0..24df7dbad9 100644 --- a/samcli/lib/build/app_builder.py +++ b/samcli/lib/build/app_builder.py @@ -407,14 +407,18 @@ def _build_lambda_image(self, function_name: str, metadata: Dict, architecture: "dockerfile": dockerfile, "tag": docker_tag, "buildargs": docker_build_args, - "decode": True, "platform": get_docker_platform(architecture), "rm": True, } if docker_build_target: build_args["target"] = cast(str, docker_build_target) - build_logs = self._docker_client.api.build(**build_args) + try: + (build_image, build_logs) = self._docker_client.images.build(**build_args) + LOG.debug("%s image is built for %s function", build_image, function_name) + except docker.errors.BuildError as ex: + LOG.error("Failed building function %s", function_name) + raise DockerBuildFailed(str(ex)) from ex # The Docker-py low level api will stream logs back but if an exception is raised by the api # this is raised when accessing the generator. So we need to wrap accessing build_logs in a try: except. 
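The fix above hinges on a behavioural difference in docker-py: the low-level client.api.build() returns a log generator and only surfaces failures while that generator is consumed, whereas the high-level client.images.build() raises docker.errors.BuildError up front and returns (image, logs) only on success. A minimal sketch of that contract outside SAM CLI (the build context path, Dockerfile name, and tag below are placeholders, not values from this patch):

    import docker

    client = docker.from_env()

    try:
        # High-level API: raises docker.errors.BuildError when a build step
        # fails; on success it returns the Image plus a generator of
        # JSON-decoded log entries.
        image, build_logs = client.images.build(
            path=".",                 # placeholder build context
            dockerfile="Dockerfile",  # placeholder Dockerfile name
            tag="example:latest",     # placeholder tag
            rm=True,
        )
        for chunk in build_logs:
            print(chunk.get("stream", ""), end="")
    except docker.errors.BuildError as ex:
        # This is the failure point that app_builder.py now converts
        # into DockerBuildFailed instead of silently streaming logs.
        print(f"Build failed: {ex}")
        for chunk in ex.build_log:
            print(chunk)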
diff --git a/tests/integration/buildcmd/test_build_cmd.py b/tests/integration/buildcmd/test_build_cmd.py index 2b35d7c6e9..b45d2a2418 100644 --- a/tests/integration/buildcmd/test_build_cmd.py +++ b/tests/integration/buildcmd/test_build_cmd.py @@ -6,6 +6,7 @@ from pathlib import Path from typing import Set from unittest import skipIf +from uuid import uuid4 import jmespath import docker @@ -49,6 +50,39 @@ SKIP_SAR_TESTS = RUNNING_ON_CI and RUNNING_TEST_FOR_MASTER_ON_CI and not RUN_BY_CANARY +@skipIf(SKIP_DOCKER_TESTS, SKIP_DOCKER_MESSAGE) +class TestBuildingImageTypeLambdaDockerFileFailures(BuildIntegBase): + template = "template_image.yaml" + + def test_with_invalid_dockerfile_location(self): + overrides = { + "Runtime": "3.10", + "Handler": "handler", + "DockerFile": "ThisDockerfileDoesNotExist", + "Tag": uuid4().hex, + } + cmdlist = self.get_command_list(parameter_overrides=overrides) + command_result = run_command(cmdlist, cwd=self.working_dir) + + # confirm build failed + self.assertEqual(command_result.process.returncode, 1) + self.assertIn("Cannot locate specified Dockerfile", command_result.stderr.decode()) + + def test_with_invalid_dockerfile_definition(self): + overrides = { + "Runtime": "3.10", + "Handler": "handler", + "DockerFile": "InvalidDockerfile", + "Tag": uuid4().hex, + } + cmdlist = self.get_command_list(parameter_overrides=overrides) + command_result = run_command(cmdlist, cwd=self.working_dir) + + # confirm build failed + self.assertEqual(command_result.process.returncode, 1) + self.assertIn("COPY requires at least two arguments", command_result.stderr.decode()) + + @skipIf( # Hits public ECR pull limitation, move it to canary tests (not RUN_BY_CANARY and not CI_OVERRIDE), diff --git a/tests/integration/testdata/buildcmd/PythonImage/InvalidDockerfile b/tests/integration/testdata/buildcmd/PythonImage/InvalidDockerfile new file mode 100644 index 0000000000..04599ba872 --- /dev/null +++ b/tests/integration/testdata/buildcmd/PythonImage/InvalidDockerfile @@ -0,0 +1,16 @@ +ARG BASE_RUNTIME + +FROM public.ecr.aws/lambda/python:$BASE_RUNTIME + +ARG FUNCTION_DIR="/var/task" + +RUN mkdir -p $FUNCTION_DIR + +# invalid line below +COPY main.py + +COPY __init__.py $FUNCTION_DIR +COPY requirements.txt $FUNCTION_DIR + +RUN python -m pip install -r $FUNCTION_DIR/requirements.txt -t $FUNCTION_DIR + diff --git a/tests/unit/lib/build_module/test_app_builder.py b/tests/unit/lib/build_module/test_app_builder.py index 408ba8bb35..c2712a5247 100644 --- a/tests/unit/lib/build_module/test_app_builder.py +++ b/tests/unit/lib/build_module/test_app_builder.py @@ -1510,7 +1510,7 @@ def test_docker_build_raises_DockerBuildFailed_when_error_in_buildlog_stream(sel "DockerBuildArgs": {"a": "b"}, } - self.docker_client_mock.api.build.return_value = [{"error": "Function building failed"}] + self.docker_client_mock.images.build.return_value = (Mock(), [{"error": "Function building failed"}]) self.builder._build_lambda_image("Name", metadata, X86_64) @@ -1530,7 +1530,7 @@ def test_dockerfile_not_in_dockercontext(self): "Bad Request", response=response_mock, explanation="Cannot locate specified Dockerfile" ) self.builder._stream_lambda_image_build_logs = error_mock - self.docker_client_mock.api.build.return_value = [] + self.docker_client_mock.images.build.return_value = (Mock(), []) self.builder._build_lambda_image("Name", metadata, X86_64) @@ -1545,7 +1545,7 @@ def test_error_rerasises(self): error_mock = Mock() error_mock.side_effect = docker.errors.APIError("Bad Request", explanation="Some 
explanation") self.builder._stream_lambda_image_build_logs = error_mock - self.docker_client_mock.api.build.return_value = [] + self.docker_client_mock.images.build.return_value = (Mock(), []) self.builder._build_lambda_image("Name", metadata, X86_64) @@ -1557,7 +1557,7 @@ def test_can_build_image_function(self): "DockerBuildArgs": {"a": "b"}, } - self.docker_client_mock.api.build.return_value = [] + self.docker_client_mock.images.build.return_value = (Mock(), []) result = self.builder._build_lambda_image("Name", metadata, X86_64) @@ -1598,7 +1598,7 @@ def test_build_image_function_with_empty_metadata_raises_Docker_Build_Failed_Exc def test_can_build_image_function_without_tag(self): metadata = {"Dockerfile": "Dockerfile", "DockerContext": "context", "DockerBuildArgs": {"a": "b"}} - self.docker_client_mock.api.build.return_value = [] + self.docker_client_mock.images.build.return_value = (Mock(), []) result = self.builder._build_lambda_image("Name", metadata, X86_64) self.assertEqual(result, "name:latest") @@ -1613,19 +1613,18 @@ def test_can_build_image_function_under_debug(self, mock_os): "DockerBuildArgs": {"a": "b"}, } - self.docker_client_mock.api.build.return_value = [] + self.docker_client_mock.images.build.return_value = (Mock, []) result = self.builder._build_lambda_image("Name", metadata, X86_64) self.assertEqual(result, "name:Tag-debug") self.assertEqual( - self.docker_client_mock.api.build.call_args, + self.docker_client_mock.images.build.call_args, # NOTE (sriram-mv): path set to ANY to handle platform differences. call( path=ANY, dockerfile="Dockerfile", tag="name:Tag-debug", buildargs={"a": "b", "SAM_BUILD_MODE": "debug"}, - decode=True, platform="linux/amd64", rm=True, ), @@ -1642,24 +1641,31 @@ def test_can_build_image_function_under_debug_with_target(self, mock_os): "DockerBuildTarget": "stage", } - self.docker_client_mock.api.build.return_value = [] + self.docker_client_mock.images.build.return_value = (Mock(), []) result = self.builder._build_lambda_image("Name", metadata, X86_64) self.assertEqual(result, "name:Tag-debug") self.assertEqual( - self.docker_client_mock.api.build.call_args, + self.docker_client_mock.images.build.call_args, call( path=ANY, dockerfile="Dockerfile", tag="name:Tag-debug", buildargs={"a": "b", "SAM_BUILD_MODE": "debug"}, - decode=True, target="stage", platform="linux/amd64", rm=True, ), ) + def test_can_raise_build_error(self): + self.docker_client_mock.images.build.side_effect = docker.errors.BuildError( + reason="Missing Dockerfile", build_log="Build failed" + ) + + with self.assertRaises(DockerBuildFailed): + self.builder._build_lambda_image("Name", {}, X86_64) + class TestApplicationBuilder_build_function(TestCase): def setUp(self): From df526d8c708de3b8ededb28f0fed143c1e5591f2 Mon Sep 17 00:00:00 2001 From: Lucas <12496191+lucashuy@users.noreply.github.com> Date: Tue, 11 Jul 2023 13:22:02 -0700 Subject: [PATCH 16/32] Relax document version check for authorizers (#5477) --- samcli/commands/local/lib/swagger/parser.py | 17 +++++++++++++++-- .../commands/local/lib/swagger/test_parser.py | 18 ++++++++++++++++++ 2 files changed, 33 insertions(+), 2 deletions(-) diff --git a/samcli/commands/local/lib/swagger/parser.py b/samcli/commands/local/lib/swagger/parser.py index 9c46e0c631..68ada78024 100644 --- a/samcli/commands/local/lib/swagger/parser.py +++ b/samcli/commands/local/lib/swagger/parser.py @@ -81,7 +81,7 @@ def get_authorizers(self, event_type: str = Route.API) -> Dict[str, Authorizer]: authorizers: Dict[str, Authorizer] = {} 
authorizer_dict = {} - document_version = self.swagger.get(SwaggerParser._SWAGGER) or self.swagger.get(SwaggerParser._OPENAPI) or "" + document_version = self._get_document_version() if document_version.startswith(SwaggerParser._2_X_VERSION): LOG.debug("Parsing Swagger document using 2.0 specification") @@ -240,6 +240,19 @@ def _get_lambda_identity_sources( return identity_sources + def _get_document_version(self) -> str: + """ + Helper method to fetch the Swagger document version + + Returns + ------- + str + A string representing a version, blank if not found + """ + document_version = self.swagger.get(SwaggerParser._SWAGGER) or self.swagger.get(SwaggerParser._OPENAPI) or "" + + return str(document_version) + def get_default_authorizer(self, event_type: str) -> Union[str, None]: """ Parses the body definition to find root level Authorizer definitions @@ -254,7 +267,7 @@ def get_default_authorizer(self, event_type: str) -> Union[str, None]: Union[str, None] Returns the name of the authorizer, if there is one defined, otherwise None """ - document_version = self.swagger.get(SwaggerParser._SWAGGER) or self.swagger.get(SwaggerParser._OPENAPI) or "" + document_version = self._get_document_version() authorizers = self.swagger.get(SwaggerParser._SWAGGER_SECURITY, []) if not authorizers: diff --git a/tests/unit/commands/local/lib/swagger/test_parser.py b/tests/unit/commands/local/lib/swagger/test_parser.py index 84ce8899de..d854ca595e 100644 --- a/tests/unit/commands/local/lib/swagger/test_parser.py +++ b/tests/unit/commands/local/lib/swagger/test_parser.py @@ -1022,3 +1022,21 @@ def test_invalid_identity_source_throws_exception(self): with self.assertRaises(InvalidSecurityDefinition): parser._get_lambda_identity_sources(Mock(), "request", Route.API, properties, auth_properties) + + +class TestGetDocumentVersion(TestCase): + @parameterized.expand( + [ + ({"swagger": "2.0"}, "2.0"), + ({"swagger": 2.0}, "2.0"), + ({"openapi": "3.0"}, "3.0"), + ({"openapi": 3.0}, "3.0"), + ({"not valid": 3.0}, ""), + ({}, ""), + ] + ) + def test_get_document_version(self, swagger_doc, expected_output): + parser = SwaggerParser(Mock(), swagger_doc) + output = parser._get_document_version() + + self.assertEqual(output, expected_output) From 577d0d4e4671413f10cdaa5944ee53de91242841 Mon Sep 17 00:00:00 2001 From: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Date: Wed, 12 Jul 2023 09:49:32 -0700 Subject: [PATCH 17/32] fix: use StringIO instead of BytesIO with StreamWriter (#5485) * fix: use stringio in ECRUploader * add typing for inner stream instance --- .../local/cli_common/invoke_context.py | 8 ++++---- samcli/lib/package/ecr_uploader.py | 4 ++-- samcli/lib/utils/stream_writer.py | 12 ++++++------ samcli/local/apigw/local_apigw_service.py | 4 ++-- .../local_lambda_invoke_service.py | 2 +- samcli/local/services/base_local_service.py | 4 ++-- .../local/cli_common/test_invoke_context.py | 2 +- .../local/services/test_base_local_service.py | 18 +++++++++--------- 8 files changed, 27 insertions(+), 27 deletions(-) diff --git a/samcli/commands/local/cli_common/invoke_context.py b/samcli/commands/local/cli_common/invoke_context.py index 3ffcae914d..9b001e4d20 100644 --- a/samcli/commands/local/cli_common/invoke_context.py +++ b/samcli/commands/local/cli_common/invoke_context.py @@ -7,7 +7,7 @@ import os from enum import Enum from pathlib import Path -from typing import IO, Any, Dict, List, Optional, Tuple, Type, cast +from typing import Any, Dict, List, Optional, TextIO, Tuple, Type, cast from 
samcli.commands._utils.template import TemplateFailedParsingException, TemplateNotFoundException from samcli.commands.exceptions import ContainersInitializationException @@ -195,7 +195,7 @@ def __init__( self._stacks: List[Stack] = None # type: ignore self._env_vars_value: Optional[Dict] = None self._container_env_vars_value: Optional[Dict] = None - self._log_file_handle: Optional[IO] = None + self._log_file_handle: Optional[TextIO] = None self._debug_context: Optional[DebugContext] = None self._layers_downloader: Optional[LayerDownloader] = None self._container_manager: Optional[ContainerManager] = None @@ -487,7 +487,7 @@ def _get_env_vars_value(filename: Optional[str]) -> Optional[Dict]: ) from ex @staticmethod - def _setup_log_file(log_file: Optional[str]) -> Optional[IO]: + def _setup_log_file(log_file: Optional[str]) -> Optional[TextIO]: """ Open a log file if necessary and return the file handle. This will create a file if it does not exist @@ -497,7 +497,7 @@ def _setup_log_file(log_file: Optional[str]) -> Optional[IO]: if not log_file: return None - return open(log_file, "wb") + return open(log_file, "w") @staticmethod def _get_debug_context( diff --git a/samcli/lib/package/ecr_uploader.py b/samcli/lib/package/ecr_uploader.py index f2d4371407..0393596b39 100644 --- a/samcli/lib/package/ecr_uploader.py +++ b/samcli/lib/package/ecr_uploader.py @@ -2,8 +2,8 @@ Client for uploading packaged artifacts to ecr """ import base64 -import io import logging +from io import StringIO from typing import Dict import botocore @@ -94,7 +94,7 @@ def upload(self, image, resource_name): else: # we need to wait till the image got pushed to ecr, without this workaround sam sync for template # contains image always fail, because the provided ecr uri is not exist. 
- _log_streamer = LogStreamer(stream=StreamWriter(stream=io.BytesIO(), auto_flush=True)) + _log_streamer = LogStreamer(stream=StreamWriter(stream=StringIO(), auto_flush=True)) _log_streamer.stream_progress(push_logs) except (BuildError, APIError, LogStreamError) as ex: diff --git a/samcli/lib/utils/stream_writer.py b/samcli/lib/utils/stream_writer.py index e5d0b43c1e..99f72c1036 100644 --- a/samcli/lib/utils/stream_writer.py +++ b/samcli/lib/utils/stream_writer.py @@ -1,11 +1,11 @@ """ This class acts like a wrapper around output streams to provide any flexibility with output we need """ -from typing import Union +from typing import TextIO, Union class StreamWriter: - def __init__(self, stream, auto_flush: bool = False): + def __init__(self, stream: TextIO, auto_flush: bool = False): """ Instatiates new StreamWriter to the specified stream @@ -20,7 +20,7 @@ def __init__(self, stream, auto_flush: bool = False): self._auto_flush = auto_flush @property - def stream(self): + def stream(self) -> TextIO: return self._stream def write_bytes(self, output: Union[bytes, bytearray]): @@ -30,7 +30,7 @@ def write_bytes(self, output: Union[bytes, bytearray]): Parameters ---------- output bytes-like object - Bytes to write + Bytes to write into buffer """ self._stream.buffer.write(output) @@ -43,8 +43,8 @@ def write_str(self, output: str): Parameters ---------- - output bytes-like object - Bytes to write + output string object + String to write """ self._stream.write(output) diff --git a/samcli/local/apigw/local_apigw_service.py b/samcli/local/apigw/local_apigw_service.py index f979b2e9a3..b80b1fc2c2 100644 --- a/samcli/local/apigw/local_apigw_service.py +++ b/samcli/local/apigw/local_apigw_service.py @@ -4,7 +4,7 @@ import json import logging from datetime import datetime -from io import BytesIO +from io import StringIO from time import time from typing import Any, Dict, List, Optional @@ -605,7 +605,7 @@ def _invoke_lambda_function(self, lambda_function_name: str, event: dict) -> str str A string containing the output from the Lambda function """ - with BytesIO() as stdout: + with StringIO() as stdout: event_str = json.dumps(event, sort_keys=True) stdout_writer = StreamWriter(stdout, auto_flush=True) diff --git a/samcli/local/lambda_service/local_lambda_invoke_service.py b/samcli/local/lambda_service/local_lambda_invoke_service.py index c6d7506fb2..546066449c 100644 --- a/samcli/local/lambda_service/local_lambda_invoke_service.py +++ b/samcli/local/lambda_service/local_lambda_invoke_service.py @@ -162,7 +162,7 @@ def _invoke_request_handler(self, function_name): request_data = request_data.decode("utf-8") - stdout_stream = io.BytesIO() + stdout_stream = io.StringIO() stdout_stream_writer = StreamWriter(stdout_stream, auto_flush=True) try: diff --git a/samcli/local/services/base_local_service.py b/samcli/local/services/base_local_service.py index fcb7cd95ae..671d48888c 100644 --- a/samcli/local/services/base_local_service.py +++ b/samcli/local/services/base_local_service.py @@ -82,7 +82,7 @@ def service_response(body, headers, status_code): class LambdaOutputParser: @staticmethod - def get_lambda_output(stdout_stream: io.BytesIO) -> Tuple[str, bool]: + def get_lambda_output(stdout_stream: io.StringIO) -> Tuple[str, bool]: """ This method will extract read the given stream and return the response from Lambda function separated out from any log statements it might have outputted. 
Logs end up in the stdout stream if the Lambda function @@ -100,7 +100,7 @@ def get_lambda_output(stdout_stream: io.BytesIO) -> Tuple[str, bool]: bool If the response is an error/exception from the container """ - lambda_response = stdout_stream.getvalue().decode("utf-8") + lambda_response = stdout_stream.getvalue() # When the Lambda Function returns an Error/Exception, the output is added to the stdout of the container. From # our perspective, the container returned some value, which is not always true. Since the output is the only diff --git a/tests/unit/commands/local/cli_common/test_invoke_context.py b/tests/unit/commands/local/cli_common/test_invoke_context.py index 3cab08c82a..a9ba9d8b22 100644 --- a/tests/unit/commands/local/cli_common/test_invoke_context.py +++ b/tests/unit/commands/local/cli_common/test_invoke_context.py @@ -1106,7 +1106,7 @@ def test_must_open_file_for_writing(self): with patch("samcli.commands.local.cli_common.invoke_context.open", m): InvokeContext._setup_log_file(filename) - m.assert_called_with(filename, "wb") + m.assert_called_with(filename, "w") class TestInvokeContext_get_debug_context(TestCase): diff --git a/tests/unit/local/services/test_base_local_service.py b/tests/unit/local/services/test_base_local_service.py index fec13e25c9..34bc44c193 100644 --- a/tests/unit/local/services/test_base_local_service.py +++ b/tests/unit/local/services/test_base_local_service.py @@ -66,17 +66,17 @@ def test_create_returns_not_implemented(self): class TestLambdaOutputParser(TestCase): @parameterized.expand( [ - param("with mixed data and json response", b'data\n{"a": "b"}', 'data\n{"a": "b"}'), - param("with response as string", b"response", "response"), - param("with json response only", b'{"a": "b"}', '{"a": "b"}'), - param("with one new line and json", b'\n{"a": "b"}', '\n{"a": "b"}'), - param("with response only as string", b"this is the response line", "this is the response line"), - param("with whitespaces", b'data\n{"a": "b"} \n\n\n', 'data\n{"a": "b"} \n\n\n'), - param("with empty data", b"", ""), - param("with just new lines", b"\n\n", "\n\n"), + param("with mixed data and json response", 'data\n{"a": "b"}', 'data\n{"a": "b"}'), + param("with response as string", "response", "response"), + param("with json response only", '{"a": "b"}', '{"a": "b"}'), + param("with one new line and json", '\n{"a": "b"}', '\n{"a": "b"}'), + param("with response only as string", "this is the response line", "this is the response line"), + param("with whitespaces", 'data\n{"a": "b"} \n\n\n', 'data\n{"a": "b"} \n\n\n'), + param("with empty data", "", ""), + param("with just new lines", "\n\n", "\n\n"), param( "with whitespaces", - b"\n \n \n", + "\n \n \n", "\n \n \n", ), ] From 50c04ddb05774c43c1f8a2cf688182b7bfdde55b Mon Sep 17 00:00:00 2001 From: Daniel Mil <84205762+mildaniel@users.noreply.github.com> Date: Wed, 12 Jul 2023 11:24:37 -0700 Subject: [PATCH 18/32] chore: Update dependabot config (#5491) --- .github/dependabot.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 79b85d11ac..e90f44dae7 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -4,23 +4,25 @@ updates: - package-ecosystem: "github-actions" directory: "/" schedule: - interval: "weekly" + interval: "daily" commit-message: prefix: chore include: scope reviewers: - aws/serverless-application-experience-sbt + open-pull-requests-limit: 10 - package-ecosystem: "pip" directory: "/requirements" schedule: - interval: "weekly" + 
interval: "daily" target-branch: "develop" commit-message: prefix: chore include: scope reviewers: - aws/serverless-application-experience-sbt + open-pull-requests-limit: 10 ignore: # Ignored intentionally since we have a GHA that updates to more # completely From baff7420cbff4cf388fee428f58b1b580f844bef Mon Sep 17 00:00:00 2001 From: Lucas <12496191+lucashuy@users.noreply.github.com> Date: Wed, 12 Jul 2023 15:36:39 -0700 Subject: [PATCH 19/32] feat: Support YAML file formats for config files (#5469) * feat: Abstract SamConfig (#5208) * Abstract SamConfig and decouple TOML logic * Fix documentation and comments * Generalize exception for FileManager * Remove FileManager logic to its own file * Fix bug in setting a default FileManager * Implement requested changes This includes additional logging messages, as well as explicitly requiring file extensions * Include supported extensions in log call * Implement requested changes * Update docstrings * Refactor changes to preserve TOML comments * Allow file document to update properly * Remove duplicate data Since TOMLDocument wraps a Python dictionary anyway, we don't need the separate information * Add put comment for FileManager * Implement requested changes * Format files according to standard * Implement helper method for dict-like to TOMLDocument --------- Co-authored-by: Leonardo Gama * feat: Add YAML config file option (#5253) * Abstract SamConfig and decouple TOML logic * Fix documentation and comments * Generalize exception for FileManager * Remove FileManager logic to its own file * Fix bug in setting a default FileManager * Implement requested changes This includes additional logging messages, as well as explicitly requiring file extensions * Include supported extensions in log call * Implement requested changes * Update docstrings * Refactor changes to preserve TOML comments * Allow file document to update properly * Remove duplicate data Since TOMLDocument wraps a Python dictionary anyway, we don't need the separate information * Add put comment for FileManager * Implement requested changes * Format files according to standard * Implement helper method for dict-like to TOMLDocument * Implement YamlFileManager * Redefine YAML locally in class * Update YAML-cast method * Format correctly --------- Co-authored-by: Leonardo Gama * feat: Add JSON config file option (#5264) * Add JsonFileManager * Implement requested changes * Remove unused line in test --------- Co-authored-by: Leonardo Gama * chore: Refactor TomlProvider to ConfigProvider (#5273) Also, update docstrings to be in NumPy/SciPy format Co-authored-by: Leonardo Gama * feat: Add hierarchy for samconfig filetypes (#5297) * Add hierarchy for samconfig default filetypes * Formatting and fixing tests * Implement requested changes * Fix logic to properly allow default name * Fix linting issue * Fix failing Windows test * Update default config name in guided config --------- Co-authored-by: Leonardo Gama * feat: Track config file extension (#5315) * Add tracker for config file extensions * Repair broken integration tests * Clean up metric sort logic * Implement requested changes * Add Event unit tests * Fix formatting --------- Co-authored-by: Leonardo Gama * feat: Add and fix samconfig integration tests (#5371) * Add samconfig integration tests * Add config checks to guided deploy integration tests * Fix failing integration test on Windows * *Actually* fix failing Windows integration test * Implement requested changes * Fix logging imports * Implement requested changes * Fix bug 
comparing ParameterSource enum --------- Co-authored-by: Leonardo Gama * feat: Fix message when no config file is found (#5394) * Fix message when no config file found * Formatting --------- Co-authored-by: Leonardo Gama * chore: Rebase config project to develop (#5406) * fix: fix the hardcoded number of stages printed in logs. (#5210) * feat: Linking Authorizers to Lambda functions using the invocation URI (#5196) * Link authorizer to lambda function invoke URI * Updated doc string * Updated exception messages back * Added check for one element in reference list * Updated empty ref list check to not block * Updated log message * Fix long line lint error --------- Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> * chore(deps-dev): bump parameterized from 0.8.1 to 0.9.0 in /requirements (#5214) Bumps [parameterized](https://github.com/wolever/parameterized) from 0.8.1 to 0.9.0. - [Changelog](https://github.com/wolever/parameterized/blob/master/CHANGELOG.txt) - [Commits](https://github.com/wolever/parameterized/compare/v0.8.1...v0.9.0) --- updated-dependencies: - dependency-name: parameterized dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * chore(deps-dev): bump filelock from 3.10.7 to 3.12.0 in /requirements (#5213) Bumps [filelock](https://github.com/tox-dev/py-filelock) from 3.10.7 to 3.12.0. - [Release notes](https://github.com/tox-dev/py-filelock/releases) - [Changelog](https://github.com/tox-dev/py-filelock/blob/main/docs/changelog.rst) - [Commits](https://github.com/tox-dev/py-filelock/compare/3.10.7...3.12.0) --- updated-dependencies: - dependency-name: filelock dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * chore(deps): bump attrs from 22.2.0 to 23.1.0 in /requirements (#5212) Bumps [attrs](https://github.com/python-attrs/attrs) from 22.2.0 to 23.1.0. - [Release notes](https://github.com/python-attrs/attrs/releases) - [Changelog](https://github.com/python-attrs/attrs/blob/main/CHANGELOG.md) - [Commits](https://github.com/python-attrs/attrs/compare/22.2.0...23.1.0) --- updated-dependencies: - dependency-name: attrs dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * feat: update SAM CLI with latest App Templates commit hash (#5211) * feat: updating app templates repo hash with (a34f563f067e13df3eb350d36461b99397b6cda6) * dummy change to trigger checks * revert dummy commit --------- Co-authored-by: GitHub Action Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> * fix: fix failing Terraform integration test cases (#5218) * fix: fix the failing terraform integration test cases * fix: fix the resource address while accessing the module config resources * fix: fix checking the experimental log integration test cases * chore: bump version to 1.85.0 (#5226) * chore: use the SAR Application created in testing accounts (#5221) * chore: update aws_lambda_builders to 1.32.0 (#5215) Co-authored-by: GitHub Action Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> * feat: Added linking Gateway Method to Lambda Authorizer (#5228) * Added linking method to authorizer * Fixed docstring spelling mistake --------- Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> * feat: Return early during linking if no destination resources are found (#5220) * Returns during linking if no destination resources are found * Updated comment to correctly reflect state * Cleaned extra word --------- Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> * chore: Strengthen wording on "no Auth" during deploy (#5231) Co-authored-by: Jacob Fuss Co-authored-by: Sriram Madapusi Vasudevan <3770774+sriram-mv@users.noreply.github.com> * feat: Link Lambda Authorizer to Rest API (#5219) * Link RestApiId property for Lambda Authorizers * Updated docstring * Format files --------- Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> * feat: updating app templates repo hash with (9ee7db342025a42023882960b23ebfcde1d87422) (#5242) Co-authored-by: GitHub Action * fix: handle edge cases with function sync flow in sam sync command (#5222) * fix: handle special cases for function sync flow * update with unit tests * add integration tests * set ADL to false * fix update file methods * address comments * address comments to instantiate FunctionBuildInfo in the beginning * chore: Upgrade Mac installer to Py3.11 (#5223) * chore: Upgrade Mac installer to Py3.11 * Remove python in mac installer build process * Update hardcoded python version in build-mac.sh --------- Co-authored-by: Jacob Fuss * feat: updating app templates repo hash with (66f4a230d1c939a0c3f7b5647710c694c3a486f7) (#5245) Co-authored-by: GitHub Action * Revert "chore: Upgrade Mac installer to Py3.11 (#5223)" (#5252) This reverts commit 5954042d0bced7fea329c06930f021915ed9b746. * fix: add 3.11 to classifiers and upgrade Docker (#5225) * fix: add 3.11 to classifiers - update dependencies, need to nail down the versions. * Pin dev dependencies and handle excluding folders for mypy * Remove unneeded type: ignores * Fix name-match mypy errors * Fix empty-body error from mypy * Fix mypy errors by ignoring and get pytest to run/pass * Force mypy to not fail hopefully * Remove unneeded assignment * Update pinned requirements file --------- Co-authored-by: Jacob Fuss Co-authored-by: Jacob Fuss <32497805+jfuss@users.noreply.github.com> * fix: fix build and deploy SAR integration test cases (#5244) * fix: fix build SAR integration test cases * add comments to the UpdatableSARTemplate class usage. 
* fix black check * chore(deps): bump markupsafe from 2.1.2 to 2.1.3 in /requirements (#5257) Bumps [markupsafe](https://github.com/pallets/markupsafe) from 2.1.2 to 2.1.3. - [Release notes](https://github.com/pallets/markupsafe/releases) - [Changelog](https://github.com/pallets/markupsafe/blob/main/CHANGES.rst) - [Commits](https://github.com/pallets/markupsafe/compare/2.1.2...2.1.3) --- updated-dependencies: - dependency-name: markupsafe dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * chore(deps): bump pydantic from 1.10.7 to 1.10.8 in /requirements (#5258) Bumps [pydantic](https://github.com/pydantic/pydantic) from 1.10.7 to 1.10.8. - [Release notes](https://github.com/pydantic/pydantic/releases) - [Changelog](https://github.com/pydantic/pydantic/blob/v1.10.8/HISTORY.md) - [Commits](https://github.com/pydantic/pydantic/compare/v1.10.7...v1.10.8) --- updated-dependencies: - dependency-name: pydantic dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * feat: Add click command for cloud invoke command (#5238) * Add custom click option for cloud invoke called parameter * Added more error handling to executors and updated output-format enum to use auto * Add new CLI command for cloud invoke * Update samcli/commands/remote_invoke/invoke/cli.py Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * Update samcli/commands/remote_invoke/invoke/cli.py Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * Update samcli/commands/remote_invoke/cloud.py Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * Update samcli/cli/types.py Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * Address feedback * Moved all command options to be handled by click configuration * Updated validation function doc-string * Updated debug logs in types.py * Changed remote_invoke dir to cloud and updated log level for validation * Address feedback --------- Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * chore(deps-dev): bump boto3-stubs[apigateway,cloudformation,ecr,iam,lambda,s3,schemas,secretsmanager,signer,stepfunctions,sts,xray] (#5256) Bumps [boto3-stubs[apigateway,cloudformation,ecr,iam,lambda,s3,schemas,secretsmanager,signer,stepfunctions,sts,xray]](https://github.com/youtype/mypy_boto3_builder) from 1.26.131 to 1.26.146. - [Release notes](https://github.com/youtype/mypy_boto3_builder/releases) - [Commits](https://github.com/youtype/mypy_boto3_builder/commits) --- updated-dependencies: - dependency-name: boto3-stubs[apigateway,cloudformation,ecr,iam,lambda,s3,schemas,secretsmanager,signer,stepfunctions,sts,xray] dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * pin pytest-metadata to avoid its breaking change (#5261) * chore: update aws_lambda_builders to 1.33.0 (#5262) Co-authored-by: GitHub Action Co-authored-by: Daniel Mil <84205762+mildaniel@users.noreply.github.com> * chore: Add python3.11 to canaries (#5263) * chore: Add python3.11 to canaries * Remove python3.9 * Artifact export for GraphQLApi (#5250) * Artifact export for GraphQLApi * format * docstrings * fix unit tests * fix mypy issues * improve search method signature * chore: bump version to 1.86.0 (#5266) * fix: add constant str for enums to support deepcopy operation (#5265) * fix: add constant str for enums to support deepcopy operation * add unit tests * formatting * update automated updates gha to force restart of status checks (#5269) * integration tests for graphql resource package (#5271) * Revert "fix: add 3.11 to classifiers and upgrade Docker (#5225)" This reverts commit b51d6617340853d891469ff7a4dcc5bb88175389. * chore: bump version to 1.86.1 * chore: Upgrade Docker-py/ Support Py3.11 for running tests (#5279) * fix: add 3.11 to classifiers and upgrade Docker (#5225) * fix: add 3.11 to classifiers - update dependencies, need to nail down the versions. * Pin dev dependencies and handle excluding folders for mypy * Remove unneeded type: ignores * Fix name-match mypy errors * Fix empty-body error from mypy * Fix mypy errors by ignoring and get pytest to run/pass * Force mypy to not fail hopefully * Remove unneeded assignment * Update pinned requirements file --------- Co-authored-by: Jacob Fuss Co-authored-by: Jacob Fuss <32497805+jfuss@users.noreply.github.com> * chore: Force version on docker and allow unit test to run when docker not running In order for the docker.from_env() not to fail when docker is not installed/running, we force the min version on client creation. This was the default behavior in 4.X of docker-py but not longer in the latest version. --------- Co-authored-by: Sriram Madapusi Vasudevan <3770774+sriram-mv@users.noreply.github.com> Co-authored-by: Jacob Fuss * test: GHA to Execute Test without Docker Running (#5290) * test: Test without Docker running * Add build test * Run install * Remove success condition * Add continue on error * Add continue on error * Separate tests * Fix test name * Require new test * Address comments * Attempt to parameterize for windows * Attempt to parameterize for windows * Attempt to parameterize for windows * Set samdev in environment * Move skip to top of test class * fix: remove ruby3.2 from preview runtimes (#5296) * fix: remove ruby3.2 from preview runtimes * update {} with set() * Fix: Force docker version to match 4.2's default version (#5305) Co-authored-by: Jacob Fuss * chore: cleanup appveyor definitions for not running jobs which is already run with GHA & add docker info/version commands (#5306) * chore: remove redundant tests and setup from appveyor definitions * add/update docker info and docker version commands * add 3.11 and macos to GHAs * add some explanations to Windows section * fix: Fix failing tests on Python3.11 (#5317) * chore(deps): bump cryptography from 39.0.2 to 41.0.0 in /requirements (#5251) * chore(deps): bump cryptography from 39.0.2 to 41.0.0 in /requirements Bumps [cryptography](https://github.com/pyca/cryptography) from 39.0.2 to 41.0.0. 
- [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/39.0.2...41.0.0) --- updated-dependencies: - dependency-name: cryptography dependency-type: direct:production ... Signed-off-by: dependabot[bot] * Bump pyopenssl version to support newer cryptography lib --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Lucas <12496191+lucashuy@users.noreply.github.com> Co-authored-by: Jacob Fuss <32497805+jfuss@users.noreply.github.com> Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * add sleep between close and reopen (#5320) * GraphQLApi support for `sam deploy` (#5294) * GraphQLApi support for `sam deploy` * unit tests and format fixes * fix: Update Arn parsing logic and fix some edge cases/bug fixes for remote invoke (#5295) * Fix some edge cases and bug fixes for remote invoke and update Arn parsing logic * Address feedback * Add unit test for s3 with no region/accoint_id provided * Renamed command to sam remote invoke * chore: update aws_lambda_builders to 1.34.0 (#5343) * chore: update aws_lambda_builders to 1.34.0 * Update base.txt --------- Co-authored-by: GitHub Action Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> * test: test building npm and Typescript projects using external manifest file. (#5283) * test: test building npm and Typescript projects using external manifest file. * fix mypy issues * remove node 12.x, and add the new node versions * run make format * chore(deps-dev): bump ruff from 0.0.261 to 0.0.272 in /requirements (#5337) Bumps [ruff](https://github.com/charliermarsh/ruff) from 0.0.261 to 0.0.272. - [Release notes](https://github.com/charliermarsh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/BREAKING_CHANGES.md) - [Commits](https://github.com/charliermarsh/ruff/compare/v0.0.261...v0.0.272) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * chore(deps-dev): bump pytest-cov from 4.0.0 to 4.1.0 in /requirements (#5335) Bumps [pytest-cov](https://github.com/pytest-dev/pytest-cov) from 4.0.0 to 4.1.0. - [Changelog](https://github.com/pytest-dev/pytest-cov/blob/master/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest-cov/compare/v4.0.0...v4.1.0) --- updated-dependencies: - dependency-name: pytest-cov dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * feat: add lambda streaming support for remote invoke (#5307) * feat: support response streaming with remote invoke * add invoker and mappers * Update output formatting of stream response * add unit tests * fix formatting * Add docs * address comments * formatting * move is_function_invoke_mode_response_stream into lambda invoke executors and add/update string constants * chore: bump version to 1.87.0 * Revert app templates gha (#5356) * Revert "add sleep between close and reopen (#5320)" This reverts commit 5be690c88d580cfeee7731f549c75ed7543f47c5. * Revert "update automated updates gha to force restart of status checks (#5269)" This reverts commit deb212bc21eda2be0290e9a30f296aa74331e6c3. 
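The response-streaming support for remote invoke mentioned a few items above corresponds, at the SDK level, to Lambda's InvokeWithResponseStream API. The sketch below is only a hedged illustration of that boto3 call, with an invented function name and payload; it is not the executor code added in #5307.

    import json

    import boto3

    lambda_client = boto3.client("lambda")

    # Hypothetical function name and event payload, used purely for illustration.
    response = lambda_client.invoke_with_response_stream(
        FunctionName="my-streaming-function",
        InvocationType="RequestResponse",
        Payload=json.dumps({"hello": "world"}),
    )

    # The EventStream yields PayloadChunk events until an InvokeComplete event arrives.
    for event in response["EventStream"]:
        if "PayloadChunk" in event:
            print(event["PayloadChunk"]["Payload"].decode("utf-8"), end="")
        elif "InvokeComplete" in event:
            error_code = event["InvokeComplete"].get("ErrorCode")
            if error_code:
                print(f"\nInvocation failed: {error_code}")

Consuming the stream with an iterator rather than a single read is what allows partial output to be printed before the function finishes.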
* refactor: make remote invoke reactive to display results as soon as they are available (#5359) * refactor: make remote invoke reactive to display results as soon as they are available * addressed the comments * refactor init_clients in sam delete (#5360) * refactor init_clients in sam delete * remove unused line * use client_provider * fix broken tests * Update samcli/commands/delete/delete_context.py Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * add telemetry * fix format --------- Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * chore: update aws-sam-translator to 1.69.0 (#5370) Co-authored-by: GitHub Action * feat: sam remote invoke help text and UX fixes (#5366) * Improve remote invoke help text and fix some UX bugs * Updated help text for parameter option * Updated test class name * Updated test method name * Updated help text for output-format and event-file * Address feedback * Updated help text for parameter option * Changed --output-format name to output and the values to text/json * Handle empty event for lambda and read from stdin when - is passed for event-file * chore: temporary pin python version to 3.7.16 (#5384) * chore: temporary pin python version to 3.7.16 * fix github action syntax error * Updated cfn-lint to support ruby3.2 in validate (#5375) * Remove unneeded test cases (#5374) * Remove unneeded test cases * Removing the two integ test cases as there is already coverage in unit test for cases that no region is specified * feat: updating app templates repo hash with (67f28fd83477e0e15b394f995afb33b2053b4074) (#5362) Co-authored-by: GitHub Action Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * test: Integration tests for remote invoke on regular lambda functions (#5382) * Created base integ glass for remote invoke tests * Add integration tests for invoking lambda functions * make black * Moved tearDownClass to base class * Removed tearDown class from inherited classes and updated lambda fn timeout * Remove the check to skip appveyor tests on master branch * feat: Make remote invoke command available (#5381) * Enabled remote invoke command and updated docs link * Created base integ glass for remote invoke tests * Added end2end integ tests for remote invoke * make black * Moved tearDownClass to base class * Remove the check to skip appveyor tests on master branch * test: Remote invoke integration tests for response stream configured lambda functions (#5383) * Created base integ glass for remote invoke tests * Add integration tests for invoking response streaming lambda fns * make black * Moved tearDownClass to base class * Moved tearDownClass method to base class and removed architectures from template file * Remove the check to skip appveyor tests on master branch * chore: bump version to 1.88.0 (#5393) * chore: fix issues with appveyor ubuntu setup #5395 * chore: remove deprecated runtime dotnetcore3.1 (#5091) * chore: remove deprecated runtime dotnetcore3.1 * apply pr comments * fix(invoke): Write in UTF-8 string instead of bytes. (#5232) * fix(invoke): Write in UTF-8 string instead of bytes. It appears that we were using sys.stdout.buffer to support python2 and python3 at the same time. Switching to just write to sys.stdout allows us to write a utf-8 encoding string. When using sys.stdout.buffer, we can only write bytes and I couldn't get the correct UTF8 encoded string to print correctly. 
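To make the sys.stdout versus sys.stdout.buffer point above concrete, here is a minimal standard-library sketch (not the SAM CLI stream-writer itself): the text stream accepts str and encodes it on write, while the underlying binary stream only accepts bytes that the caller has already encoded.

    import sys

    message = "résumé ✓"  # arbitrary text containing non-ASCII characters

    # Text stream: accepts str, encoding (normally UTF-8) happens inside the stream.
    sys.stdout.write(message + "\n")

    # Binary stream: accepts bytes only, so the caller must encode explicitly.
    sys.stdout.buffer.write((message + "\n").encode("utf-8"))
    sys.stdout.buffer.flush()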
* Fix ruff errors * Update log_streamer.py to remove encoding * More updates to make everything work better in general * Fix with ruff again * Explictingly write to stream for building images * More patching writes * More patching * Fix long line * Use mock over io.string * More fixing of tests * Assert mock instead of data directly * More small edits in test * Verify through calls instead of value * run make black * Fix when we flush to match pervious behavior and output * add integration tests * run make black --------- Co-authored-by: Jacob Fuss Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * Revert "fix(invoke): Write in UTF-8 string instead of bytes. (#5232)" (#5401) This reverts commit 97104eac05c47aec1c7db62cb98cd050c7656d3d. * Add sanity check script and use it in pyinstaller GHA (#5400) * Add sanity check script and use it in pyinstaller GHA * set pipefail in sanity-check.sh * Make CI_OVERRIDE a global env var in the GHA workflow * setup go in GHA * disable telemetry * Update script to check binary existence and to fix an issue in go build * Resolve changes --------- Signed-off-by: dependabot[bot] Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> Co-authored-by: Lucas <12496191+lucashuy@users.noreply.github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: GitHub Action Co-authored-by: Jacob Fuss <32497805+jfuss@users.noreply.github.com> Co-authored-by: Jacob Fuss Co-authored-by: Sriram Madapusi Vasudevan <3770774+sriram-mv@users.noreply.github.com> Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Co-authored-by: Wing Fung Lau <4760060+hawflau@users.noreply.github.com> Co-authored-by: hnnasit <84355507+hnnasit@users.noreply.github.com> Co-authored-by: Daniel Mil <84205762+mildaniel@users.noreply.github.com> Co-authored-by: Slava Senchenko Co-authored-by: Leonardo Gama * Disable JSON file extension support (#5426) Co-authored-by: Leonardo Gama * feat: Repair and refine tests (#5431) * Fix failing integration test * Add FileManager check for array param --------- Co-authored-by: Leonardo Gama * chore: Update feat/config-files branch with changes from develop (#5466) * fix: fix the hardcoded number of stages printed in logs. (#5210) * feat: Linking Authorizers to Lambda functions using the invocation URI (#5196) * Link authorizer to lambda function invoke URI * Updated doc string * Updated exception messages back * Added check for one element in reference list * Updated empty ref list check to not block * Updated log message * Fix long line lint error --------- Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> * chore(deps-dev): bump parameterized from 0.8.1 to 0.9.0 in /requirements (#5214) Bumps [parameterized](https://github.com/wolever/parameterized) from 0.8.1 to 0.9.0. - [Changelog](https://github.com/wolever/parameterized/blob/master/CHANGELOG.txt) - [Commits](https://github.com/wolever/parameterized/compare/v0.8.1...v0.9.0) --- updated-dependencies: - dependency-name: parameterized dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * chore(deps-dev): bump filelock from 3.10.7 to 3.12.0 in /requirements (#5213) Bumps [filelock](https://github.com/tox-dev/py-filelock) from 3.10.7 to 3.12.0. - [Release notes](https://github.com/tox-dev/py-filelock/releases) - [Changelog](https://github.com/tox-dev/py-filelock/blob/main/docs/changelog.rst) - [Commits](https://github.com/tox-dev/py-filelock/compare/3.10.7...3.12.0) --- updated-dependencies: - dependency-name: filelock dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * chore(deps): bump attrs from 22.2.0 to 23.1.0 in /requirements (#5212) Bumps [attrs](https://github.com/python-attrs/attrs) from 22.2.0 to 23.1.0. - [Release notes](https://github.com/python-attrs/attrs/releases) - [Changelog](https://github.com/python-attrs/attrs/blob/main/CHANGELOG.md) - [Commits](https://github.com/python-attrs/attrs/compare/22.2.0...23.1.0) --- updated-dependencies: - dependency-name: attrs dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * feat: update SAM CLI with latest App Templates commit hash (#5211) * feat: updating app templates repo hash with (a34f563f067e13df3eb350d36461b99397b6cda6) * dummy change to trigger checks * revert dummy commit --------- Co-authored-by: GitHub Action Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> * fix: fix failing Terraform integration test cases (#5218) * fix: fix the failing terraform integration test cases * fix: fix the resource address while accessing the module config resources * fix: fix checking the experimental log integration test cases * chore: bump version to 1.85.0 (#5226) * chore: use the SAR Application created in testing accounts (#5221) * chore: update aws_lambda_builders to 1.32.0 (#5215) Co-authored-by: GitHub Action Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> * feat: Added linking Gateway Method to Lambda Authorizer (#5228) * Added linking method to authorizer * Fixed docstring spelling mistake --------- Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> * feat: Return early during linking if no destination resources are found (#5220) * Returns during linking if no destination resources are found * Updated comment to correctly reflect state * Cleaned extra word --------- Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> * chore: Strengthen wording on "no Auth" during deploy (#5231) Co-authored-by: Jacob Fuss Co-authored-by: Sriram Madapusi Vasudevan <3770774+sriram-mv@users.noreply.github.com> * feat: Link Lambda Authorizer to Rest API (#5219) * Link RestApiId property for Lambda Authorizers * Updated docstring * Format files --------- Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> * feat: updating app templates repo hash with (9ee7db342025a42023882960b23ebfcde1d87422) (#5242) Co-authored-by: GitHub Action * fix: handle edge cases with function sync flow in sam sync command (#5222) * fix: handle special cases for function sync flow * update with unit tests * add integration tests * set ADL to false * fix update file methods * address comments * address comments to instantiate 
FunctionBuildInfo in the beginning * chore: Upgrade Mac installer to Py3.11 (#5223) * chore: Upgrade Mac installer to Py3.11 * Remove python in mac installer build process * Update hardcoded python version in build-mac.sh --------- Co-authored-by: Jacob Fuss * feat: updating app templates repo hash with (66f4a230d1c939a0c3f7b5647710c694c3a486f7) (#5245) Co-authored-by: GitHub Action * Revert "chore: Upgrade Mac installer to Py3.11 (#5223)" (#5252) This reverts commit 5954042d0bced7fea329c06930f021915ed9b746. * fix: add 3.11 to classifiers and upgrade Docker (#5225) * fix: add 3.11 to classifiers - update dependencies, need to nail down the versions. * Pin dev dependencies and handle excluding folders for mypy * Remove unneeded type: ignores * Fix name-match mypy errors * Fix empty-body error from mypy * Fix mypy errors by ignoring and get pytest to run/pass * Force mypy to not fail hopefully * Remove unneeded assignment * Update pinned requirements file --------- Co-authored-by: Jacob Fuss Co-authored-by: Jacob Fuss <32497805+jfuss@users.noreply.github.com> * fix: fix build and deploy SAR integration test cases (#5244) * fix: fix build SAR integration test cases * add comments to the UpdatableSARTemplate class usage. * fix black check * chore(deps): bump markupsafe from 2.1.2 to 2.1.3 in /requirements (#5257) Bumps [markupsafe](https://github.com/pallets/markupsafe) from 2.1.2 to 2.1.3. - [Release notes](https://github.com/pallets/markupsafe/releases) - [Changelog](https://github.com/pallets/markupsafe/blob/main/CHANGES.rst) - [Commits](https://github.com/pallets/markupsafe/compare/2.1.2...2.1.3) --- updated-dependencies: - dependency-name: markupsafe dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * chore(deps): bump pydantic from 1.10.7 to 1.10.8 in /requirements (#5258) Bumps [pydantic](https://github.com/pydantic/pydantic) from 1.10.7 to 1.10.8. - [Release notes](https://github.com/pydantic/pydantic/releases) - [Changelog](https://github.com/pydantic/pydantic/blob/v1.10.8/HISTORY.md) - [Commits](https://github.com/pydantic/pydantic/compare/v1.10.7...v1.10.8) --- updated-dependencies: - dependency-name: pydantic dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * feat: Add click command for cloud invoke command (#5238) * Add custom click option for cloud invoke called parameter * Added more error handling to executors and updated output-format enum to use auto * Add new CLI command for cloud invoke * Update samcli/commands/remote_invoke/invoke/cli.py Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * Update samcli/commands/remote_invoke/invoke/cli.py Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * Update samcli/commands/remote_invoke/cloud.py Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * Update samcli/cli/types.py Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * Address feedback * Moved all command options to be handled by click configuration * Updated validation function doc-string * Updated debug logs in types.py * Changed remote_invoke dir to cloud and updated log level for validation * Address feedback --------- Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * chore(deps-dev): bump boto3-stubs[apigateway,cloudformation,ecr,iam,lambda,s3,schemas,secretsmanager,signer,stepfunctions,sts,xray] (#5256) Bumps [boto3-stubs[apigateway,cloudformation,ecr,iam,lambda,s3,schemas,secretsmanager,signer,stepfunctions,sts,xray]](https://github.com/youtype/mypy_boto3_builder) from 1.26.131 to 1.26.146. - [Release notes](https://github.com/youtype/mypy_boto3_builder/releases) - [Commits](https://github.com/youtype/mypy_boto3_builder/commits) --- updated-dependencies: - dependency-name: boto3-stubs[apigateway,cloudformation,ecr,iam,lambda,s3,schemas,secretsmanager,signer,stepfunctions,sts,xray] dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * pin pytest-metadata to avoid its breaking change (#5261) * chore: update aws_lambda_builders to 1.33.0 (#5262) Co-authored-by: GitHub Action Co-authored-by: Daniel Mil <84205762+mildaniel@users.noreply.github.com> * chore: Add python3.11 to canaries (#5263) * chore: Add python3.11 to canaries * Remove python3.9 * Artifact export for GraphQLApi (#5250) * Artifact export for GraphQLApi * format * docstrings * fix unit tests * fix mypy issues * improve search method signature * chore: bump version to 1.86.0 (#5266) * fix: add constant str for enums to support deepcopy operation (#5265) * fix: add constant str for enums to support deepcopy operation * add unit tests * formatting * update automated updates gha to force restart of status checks (#5269) * integration tests for graphql resource package (#5271) * Revert "fix: add 3.11 to classifiers and upgrade Docker (#5225)" This reverts commit b51d6617340853d891469ff7a4dcc5bb88175389. * chore: bump version to 1.86.1 * chore: Upgrade Docker-py/ Support Py3.11 for running tests (#5279) * fix: add 3.11 to classifiers and upgrade Docker (#5225) * fix: add 3.11 to classifiers - update dependencies, need to nail down the versions. 
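The "add constant str for enums to support deepcopy operation" item above uses a common Python pattern: mixing str into an Enum so that members behave like plain strings when structures containing them are copied, compared, or serialized. The snippet below is a generic sketch of that pattern with invented names, not the actual enum that was changed in SAM CLI.

    import copy
    from enum import Enum


    class PackageType(str, Enum):
        """Hypothetical str-backed enum."""

        ZIP = "Zip"
        IMAGE = "Image"


    template = {"Properties": {"PackageType": PackageType.ZIP}}
    copied = copy.deepcopy(template)

    # Each member is also an instance of str, so the deep-copied value still
    # compares equal to its raw string form.
    assert copied["Properties"]["PackageType"] == "Zip"
    assert isinstance(copied["Properties"]["PackageType"], str)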
* Pin dev dependencies and handle excluding folders for mypy * Remove unneeded type: ignores * Fix name-match mypy errors * Fix empty-body error from mypy * Fix mypy errors by ignoring and get pytest to run/pass * Force mypy to not fail hopefully * Remove unneeded assignment * Update pinned requirements file --------- Co-authored-by: Jacob Fuss Co-authored-by: Jacob Fuss <32497805+jfuss@users.noreply.github.com> * chore: Force version on docker and allow unit test to run when docker not running In order for the docker.from_env() not to fail when docker is not installed/running, we force the min version on client creation. This was the default behavior in 4.X of docker-py but not longer in the latest version. --------- Co-authored-by: Sriram Madapusi Vasudevan <3770774+sriram-mv@users.noreply.github.com> Co-authored-by: Jacob Fuss * test: GHA to Execute Test without Docker Running (#5290) * test: Test without Docker running * Add build test * Run install * Remove success condition * Add continue on error * Add continue on error * Separate tests * Fix test name * Require new test * Address comments * Attempt to parameterize for windows * Attempt to parameterize for windows * Attempt to parameterize for windows * Set samdev in environment * Move skip to top of test class * fix: remove ruby3.2 from preview runtimes (#5296) * fix: remove ruby3.2 from preview runtimes * update {} with set() * Fix: Force docker version to match 4.2's default version (#5305) Co-authored-by: Jacob Fuss * chore: cleanup appveyor definitions for not running jobs which is already run with GHA & add docker info/version commands (#5306) * chore: remove redundant tests and setup from appveyor definitions * add/update docker info and docker version commands * add 3.11 and macos to GHAs * add some explanations to Windows section * fix: Fix failing tests on Python3.11 (#5317) * chore(deps): bump cryptography from 39.0.2 to 41.0.0 in /requirements (#5251) * chore(deps): bump cryptography from 39.0.2 to 41.0.0 in /requirements Bumps [cryptography](https://github.com/pyca/cryptography) from 39.0.2 to 41.0.0. - [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/39.0.2...41.0.0) --- updated-dependencies: - dependency-name: cryptography dependency-type: direct:production ... Signed-off-by: dependabot[bot] * Bump pyopenssl version to support newer cryptography lib --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Lucas <12496191+lucashuy@users.noreply.github.com> Co-authored-by: Jacob Fuss <32497805+jfuss@users.noreply.github.com> Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * add sleep between close and reopen (#5320) * GraphQLApi support for `sam deploy` (#5294) * GraphQLApi support for `sam deploy` * unit tests and format fixes * fix: Update Arn parsing logic and fix some edge cases/bug fixes for remote invoke (#5295) * Fix some edge cases and bug fixes for remote invoke and update Arn parsing logic * Address feedback * Add unit test for s3 with no region/accoint_id provided * Renamed command to sam remote invoke * chore: update aws_lambda_builders to 1.34.0 (#5343) * chore: update aws_lambda_builders to 1.34.0 * Update base.txt --------- Co-authored-by: GitHub Action Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> * test: test building npm and Typescript projects using external manifest file. 
(#5283) * test: test building npm and Typescript projects using external manifest file. * fix mypy issues * remove node 12.x, and add the new node versions * run make format * chore(deps-dev): bump ruff from 0.0.261 to 0.0.272 in /requirements (#5337) Bumps [ruff](https://github.com/charliermarsh/ruff) from 0.0.261 to 0.0.272. - [Release notes](https://github.com/charliermarsh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/BREAKING_CHANGES.md) - [Commits](https://github.com/charliermarsh/ruff/compare/v0.0.261...v0.0.272) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * chore(deps-dev): bump pytest-cov from 4.0.0 to 4.1.0 in /requirements (#5335) Bumps [pytest-cov](https://github.com/pytest-dev/pytest-cov) from 4.0.0 to 4.1.0. - [Changelog](https://github.com/pytest-dev/pytest-cov/blob/master/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest-cov/compare/v4.0.0...v4.1.0) --- updated-dependencies: - dependency-name: pytest-cov dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * feat: add lambda streaming support for remote invoke (#5307) * feat: support response streaming with remote invoke * add invoker and mappers * Update output formatting of stream response * add unit tests * fix formatting * Add docs * address comments * formatting * move is_function_invoke_mode_response_stream into lambda invoke executors and add/update string constants * chore: bump version to 1.87.0 * Revert app templates gha (#5356) * Revert "add sleep between close and reopen (#5320)" This reverts commit 5be690c88d580cfeee7731f549c75ed7543f47c5. * Revert "update automated updates gha to force restart of status checks (#5269)" This reverts commit deb212bc21eda2be0290e9a30f296aa74331e6c3. 
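The docker-py note earlier in this message (forcing a minimum API version so docker.from_env() does not fail when Docker is not installed or running) comes down to passing an explicit version instead of letting the client negotiate one at construction time, since negotiation requires a round trip to the daemon. A hedged sketch follows; the exact version string pinned by SAM CLI is an assumption here.

    import docker

    # With an explicit API version the client skips version negotiation at
    # construction time, so creating it succeeds even if the daemon is down.
    client = docker.from_env(version="1.35")

    try:
        client.ping()
        docker_available = True
    except Exception:  # broad on purpose: daemon missing, socket errors, etc.
        docker_available = False

    print(f"Docker reachable: {docker_available}")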
* refactor: make remote invoke reactive to display results as soon as they are available (#5359) * refactor: make remote invoke reactive to display results as soon as they are available * addressed the comments * refactor init_clients in sam delete (#5360) * refactor init_clients in sam delete * remove unused line * use client_provider * fix broken tests * Update samcli/commands/delete/delete_context.py Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * add telemetry * fix format --------- Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * chore: update aws-sam-translator to 1.69.0 (#5370) Co-authored-by: GitHub Action * feat: sam remote invoke help text and UX fixes (#5366) * Improve remote invoke help text and fix some UX bugs * Updated help text for parameter option * Updated test class name * Updated test method name * Updated help text for output-format and event-file * Address feedback * Updated help text for parameter option * Changed --output-format name to output and the values to text/json * Handle empty event for lambda and read from stdin when - is passed for event-file * chore: temporary pin python version to 3.7.16 (#5384) * chore: temporary pin python version to 3.7.16 * fix github action syntax error * Updated cfn-lint to support ruby3.2 in validate (#5375) * Remove unneeded test cases (#5374) * Remove unneeded test cases * Removing the two integ test cases as there is already coverage in unit test for cases that no region is specified * feat: updating app templates repo hash with (67f28fd83477e0e15b394f995afb33b2053b4074) (#5362) Co-authored-by: GitHub Action Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * test: Integration tests for remote invoke on regular lambda functions (#5382) * Created base integ glass for remote invoke tests * Add integration tests for invoking lambda functions * make black * Moved tearDownClass to base class * Removed tearDown class from inherited classes and updated lambda fn timeout * Remove the check to skip appveyor tests on master branch * feat: Make remote invoke command available (#5381) * Enabled remote invoke command and updated docs link * Created base integ glass for remote invoke tests * Added end2end integ tests for remote invoke * make black * Moved tearDownClass to base class * Remove the check to skip appveyor tests on master branch * test: Remote invoke integration tests for response stream configured lambda functions (#5383) * Created base integ glass for remote invoke tests * Add integration tests for invoking response streaming lambda fns * make black * Moved tearDownClass to base class * Moved tearDownClass method to base class and removed architectures from template file * Remove the check to skip appveyor tests on master branch * chore: bump version to 1.88.0 (#5393) * chore: fix issues with appveyor ubuntu setup #5395 * chore: remove deprecated runtime dotnetcore3.1 (#5091) * chore: remove deprecated runtime dotnetcore3.1 * apply pr comments * fix(invoke): Write in UTF-8 string instead of bytes. (#5232) * fix(invoke): Write in UTF-8 string instead of bytes. It appears that we were using sys.stdout.buffer to support python2 and python3 at the same time. Switching to just write to sys.stdout allows us to write a utf-8 encoding string. When using sys.stdout.buffer, we can only write bytes and I couldn't get the correct UTF8 encoded string to print correctly. 
* Fix ruff errors * Update log_streamer.py to remove encoding * More updates to make everything work better in general * Fix with ruff again * Explictingly write to stream for building images * More patching writes * More patching * Fix long line * Use mock over io.string * More fixing of tests * Assert mock instead of data directly * More small edits in test * Verify through calls instead of value * run make black * Fix when we flush to match pervious behavior and output * add integration tests * run make black --------- Co-authored-by: Jacob Fuss Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * Revert "fix(invoke): Write in UTF-8 string instead of bytes. (#5232)" (#5401) This reverts commit 97104eac05c47aec1c7db62cb98cd050c7656d3d. * Add sanity check script and use it in pyinstaller GHA (#5400) * Add sanity check script and use it in pyinstaller GHA * set pipefail in sanity-check.sh * Make CI_OVERRIDE a global env var in the GHA workflow * setup go in GHA * disable telemetry * Update script to check binary existence and to fix an issue in go build * chore: update aws-sam-translator to 1.70.0 (#5402) Co-authored-by: GitHub Action * Version bump to 1.89.0 (#5420) * chore(docs): updated readme with additional resources (#5349) * chore: updated gitignore to ignore tmp scratch directory used by dotnet tests * chore: update readme to include additional workshop resources and missing Powertools links. Fixed formatting --------- Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> * chore(deps): bump actions/setup-go from 3 to 4 (#5418) Bumps [actions/setup-go](https://github.com/actions/setup-go) from 3 to 4. - [Release notes](https://github.com/actions/setup-go/releases) - [Commits](https://github.com/actions/setup-go/compare/v3...v4) --- updated-dependencies: - dependency-name: actions/setup-go dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Sriram Madapusi Vasudevan <3770774+sriram-mv@users.noreply.github.com> * chore(deps-dev): bump filelock from 3.12.0 to 3.12.2 in /requirements (#5378) Bumps [filelock](https://github.com/tox-dev/py-filelock) from 3.12.0 to 3.12.2. - [Release notes](https://github.com/tox-dev/py-filelock/releases) - [Changelog](https://github.com/tox-dev/py-filelock/blob/main/docs/changelog.rst) - [Commits](https://github.com/tox-dev/py-filelock/compare/3.12.0...3.12.2) --- updated-dependencies: - dependency-name: filelock dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * feat: updating app templates repo hash with (bb905c379830c3d8edbc196bda731076549028e3) (#5398) Co-authored-by: GitHub Action * fix: add a table for package help text. (#5298) * fix: add a table for package help text. * Update samcli/commands/package/core/command.py Co-authored-by: Daniel Mil <84205762+mildaniel@users.noreply.github.com> * tests: fix strings in package help text * fix: PR comments * fix: PR comments. 
--------- Co-authored-by: Daniel Mil <84205762+mildaniel@users.noreply.github.com> * fix: Handle BROKEN_PIPE_ERROR (#5386) * Handle pywintypes pipe exception * Moved exception checking to check for winerror * Use decorator and added unit tests * Added failure test case * make format * Added more context/comments * fix: remove circular dependency by moving parse_s3 method to its own util file (#5430) * fix: remove circular dependency by moving parse_s3 method to its own util file * add missing unit tests file * chore(deps): bump sympy from 1.10.1 to 1.12 in /requirements (#5338) Bumps [sympy](https://github.com/sympy/sympy) from 1.10.1 to 1.12. - [Release notes](https://github.com/sympy/sympy/releases) - [Commits](https://github.com/sympy/sympy/compare/sympy-1.10.1...sympy-1.12) --- updated-dependencies: - dependency-name: sympy dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: hnnasit <84355507+hnnasit@users.noreply.github.com> * chore(deps): bump websocket-client from 1.5.1 to 1.6.1 in /requirements (#5417) Bumps [websocket-client](https://github.com/websocket-client/websocket-client) from 1.5.1 to 1.6.1. - [Release notes](https://github.com/websocket-client/websocket-client/releases) - [Changelog](https://github.com/websocket-client/websocket-client/blob/master/ChangeLog) - [Commits](https://github.com/websocket-client/websocket-client/compare/v1.5.1...v1.6.1) --- updated-dependencies: - dependency-name: websocket-client dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * chore(deps): bump ruamel-yaml from 0.17.21 to 0.17.32 in /requirements (#5376) * chore(deps): bump ruamel-yaml from 0.17.21 to 0.17.32 in /requirements Bumps [ruamel-yaml](https://sourceforge.net/p/ruamel-yaml/code/ci/default/tree) from 0.17.21 to 0.17.32. --- updated-dependencies: - dependency-name: ruamel-yaml dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Pin ruamel-yaml-clib version --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: hnnasit <84355507+hnnasit@users.noreply.github.com> Co-authored-by: Haresh Nasit * Updated package formatter to import package options instead of deploy (#5433) Co-authored-by: Sriram Madapusi Vasudevan <3770774+sriram-mv@users.noreply.github.com> * chore(deps): bump importlib-metadata in /requirements (#5437) Bumps [importlib-metadata](https://github.com/python/importlib_metadata) from 6.1.0 to 6.7.0. - [Release notes](https://github.com/python/importlib_metadata/releases) - [Changelog](https://github.com/python/importlib_metadata/blob/main/NEWS.rst) - [Commits](https://github.com/python/importlib_metadata/compare/v6.1.0...v6.7.0) --- updated-dependencies: - dependency-name: importlib-metadata dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * feat: `sam logs` help text (#5397) * feat: `sam logs` help text * fix: make ruff happy * fix: address comments * feat: enable terraform support for local start-api command (#5389) * feat: Enable hook-name and skip-prepare-infra flagf for sam local start-api (#5217) * Enable hook-name flag for sam local start-api * Format files * test: Terraform local start-api integration tests base (#5240) * feat: update SAM CLI with latest App Templates commit hash (#5211) * feat: updating app templates repo hash with (a34f563f067e13df3eb350d36461b99397b6cda6) * dummy change to trigger checks * revert dummy commit --------- Co-authored-by: GitHub Action Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> * Enable hook-name flag for sam local start-api * Format files * fix: fix failing Terraform integration test cases (#5218) * fix: fix the failing terraform integration test cases * fix: fix the resource address while accessing the module config resources * fix: fix checking the experimental log integration test cases * chore: bump version to 1.85.0 (#5226) * chore: use the SAR Application created in testing accounts (#5221) * chore: update aws_lambda_builders to 1.32.0 (#5215) Co-authored-by: GitHub Action Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> * feat: Added linking Gateway Method to Lambda Authorizer (#5228) * Added linking method to authorizer * Fixed docstring spelling mistake --------- Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> * feat: Return early during linking if no destination resources are found (#5220) * Returns during linking if no destination resources are found * Updated comment to correctly reflect state * Cleaned extra word --------- Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> * chore: Strengthen wording on "no Auth" during deploy (#5231) Co-authored-by: Jacob Fuss Co-authored-by: Sriram Madapusi Vasudevan <3770774+sriram-mv@users.noreply.github.com> * feat: Link Lambda Authorizer to Rest API (#5219) * Link RestApiId property for Lambda Authorizers * Updated docstring * Format files --------- Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> * Terraform start-api integration tests * Add test files * Uncomment skip --------- Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: GitHub Action Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> Co-authored-by: Lucas <12496191+lucashuy@users.noreply.github.com> Co-authored-by: Jacob Fuss <32497805+jfuss@users.noreply.github.com> Co-authored-by: Jacob Fuss Co-authored-by: Sriram Madapusi Vasudevan <3770774+sriram-mv@users.noreply.github.com> * feat: Added OpenApi body integration testing and updated property builder (#5291) * Added OpenApi body integration testing and updated property builder * Added more test cases * Changed tearDown to tearDownClass * Updated JSON body parser to handle parsing errors and added unit tests * Removed V1 references * feat: Terraform Authorizer resource testing (#5270) * Added authorizer project * Added project files * Removed extra print * Add request based authorizer testing * test: Test the unsupported limitations for local start api (#5309) * test: Test the unsupported limitations for local start api * fix lint issues * apply pr comments * fix: Bug Bash UX 
Issues (#5387) * Fix bug bash UX issues * Fix beta warning printing extra characters * Fix authorizer logging --------- Co-authored-by: Daniel Mil <84205762+mildaniel@users.noreply.github.com> Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: GitHub Action Co-authored-by: Lucas <12496191+lucashuy@users.noreply.github.com> Co-authored-by: Jacob Fuss <32497805+jfuss@users.noreply.github.com> Co-authored-by: Jacob Fuss Co-authored-by: Sriram Madapusi Vasudevan <3770774+sriram-mv@users.noreply.github.com> * Updated warning message about missing function in template (#5347) Co-authored-by: Wing Fung Lau <4760060+hawflau@users.noreply.github.com> * chore(deps-dev): bump types-pywin32 in /requirements (#5436) Bumps [types-pywin32](https://github.com/python/typeshed) from 306.0.0.0 to 306.0.0.2. - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-pywin32 dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * chore: use latest python version (#5439) * chore: remove the CDK SAM CLI integration testing and depend on the same test cases defined in CDK repo (#5410) * chore: fix CDK Appveyor job and deprecate testing CDK V1 * move CDK testing to GH Actions * fix spelling mistake * run only on aws-sam-cli repo * delete Appveyor Iac integration job * change cron schedule * update lambda functions to only use py3.9 * test on my github repo * run the GH action only on aws-sam-cli repo * update lambda functions to only use py3.9 * test on my github repo * run the GH action only on aws-sam-cli repo * chore: remove the CDK SAM CLI integration testing and depend on the same test cases defined in CDK repo. 
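The "Handle BROKEN_PIPE_ERROR" change quoted above wraps command output in a decorator that tolerates a consumer closing the pipe (including the Windows pywintypes variant). Below is a rough, platform-generic sketch of such a decorator; the winerror-specific check and the decorator actually shipped in SAM CLI are not reproduced here.

    import errno
    import functools
    import os
    import sys


    def handle_broken_pipe(func):
        """Exit quietly if whoever reads our stdout closed the pipe (e.g. `sam ... | head`)."""

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except BrokenPipeError:
                # Point stdout at devnull so Python's shutdown flush does not raise again.
                devnull = os.open(os.devnull, os.O_WRONLY)
                os.dup2(devnull, sys.stdout.fileno())
                sys.exit(0)
            except OSError as ex:
                if ex.errno == errno.EPIPE:  # same condition surfaced as a plain OSError
                    sys.exit(0)
                raise

        return wrapper


    @handle_broken_pipe
    def print_lines():
        for i in range(100_000):
            print(f"line {i}")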
* chore: use amazon ecr credential helper in windows appveyor (#5446) * chore: bump version to 1.90.0 (#5448) * fix: Handler path mapping for layer-wrapped esbuild functions (#5450) * fix: Layer wrapping esbuild function handlers * Remove unused import * Use nodejs18 in tests * fix: fix macos reproducable task and gh actions (#5455) * feat(sync): support build-image option (#5441) * feat(sync): support build-image option * chore: adding build image option on help option * fix: Avoid Certain Depedendency Version (#5460) * Avoid broken click version * Pin boto3 and jsonschema * Update reproducible reqs * Ignore deprecation warnings in pytest * Pin jsonschema * feat: Abstract SamConfig (#5208) * Abstract SamConfig and decouple TOML logic * Fix documentation and comments * Generalize exception for FileManager * Remove FileManager logic to its own file * Fix bug in setting a default FileManager * Implement requested changes This includes additional logging messages, as well as explicitly requiring file extensions * Include supported extensions in log call * Implement requested changes * Update docstrings * Refactor changes to preserve TOML comments * Allow file document to update properly * Remove duplicate data Since TOMLDocument wraps a Python dictionary anyway, we don't need the separate information * Add put comment for FileManager * Implement requested changes * Format files according to standard * Implement helper method for dict-like to TOMLDocument --------- Co-authored-by: Leonardo Gama * feat: Add YAML config file option (#5253) * Abstract SamConfig and decouple TOML logic * Fix documentation and comments * Generalize exception for FileManager * Remove FileManager logic to its own file * Fix bug in setting a default FileManager * Implement requested changes This includes additional logging messages, as well as explicitly requiring file extensions * Include supported extensions in log call * Implement requested changes * Update docstrings * Refactor changes to preserve TOML comments * Allow file document to update properly * Remove duplicate data Since TOMLDocument wraps a Python dictionary anyway, we don't need the separate information * Add put comment for FileManager * Implement requested changes * Format files according to standard * Implement helper method for dict-like to TOMLDocument * Implement YamlFileManager * Redefine YAML locally in class * Update YAML-cast method * Format correctly --------- Co-authored-by: Leonardo Gama * feat: Add JSON config file option (#5264) * Add JsonFileManager * Implement requested changes * Remove unused line in test --------- Co-authored-by: Leonardo Gama * chore: Refactor TomlProvider to ConfigProvider (#5273) Also, update docstrings to be in NumPy/SciPy format Co-authored-by: Leonardo Gama * feat: Add hierarchy for samconfig filetypes (#5297) * Add hierarchy for samconfig default filetypes * Formatting and fixing tests * Implement requested changes * Fix logic to properly allow default name * Fix linting issue * Fix failing Windows test * Update default config name in guided config --------- Co-authored-by: Leonardo Gama * feat: Track config file extension (#5315) * Add tracker for config file extensions * Repair broken integration tests * Clean up metric sort logic * Implement requested changes * Add Event unit tests * Fix formatting --------- Co-authored-by: Leonardo Gama * feat: Add and fix samconfig integration tests (#5371) * Add samconfig integration tests * Add config checks to guided deploy integration tests * Fix failing integration 
test on Windows * *Actually* fix failing Windows integration test * Implement requested changes * Fix logging imports * Implement requested changes * Fix bug comparing ParameterSource enum --------- Co-authored-by: Leonardo Gama * feat: Fix message when no config file is found (#5394) * Fix message when no config file found * Formatting --------- Co-authored-by: Leonardo Gama * chore: Rebase config project to develop (#5406) * fix: fix the hardcoded number of stages printed in logs. (#5210) * feat: Linking Authorizers to Lambda functions using the invocation URI (#5196) * Link authorizer to lambda function invoke URI * Updated doc string * Updated exception messages back * Added check for one element in reference list * Updated empty ref list check to not block * Updated log message * Fix long line lint error --------- Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> * chore(deps-dev): bump parameterized from 0.8.1 to 0.9.0 in /requirements (#5214) Bumps [parameterized](https://github.com/wolever/parameterized) from 0.8.1 to 0.9.0. - [Changelog](https://github.com/wolever/parameterized/blob/master/CHANGELOG.txt) - [Commits](https://github.com/wolever/parameterized/compare/v0.8.1...v0.9.0) --- updated-dependencies: - dependency-name: parameterized dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * chore(deps-dev): bump filelock from 3.10.7 to 3.12.0 in /requirements (#5213) Bumps [filelock](https://github.com/tox-dev/py-filelock) from 3.10.7 to 3.12.0. - [Release notes](https://github.com/tox-dev/py-filelock/releases) - [Changelog](https://github.com/tox-dev/py-filelock/blob/main/docs/changelog.rst) - [Commits](https://github.com/tox-dev/py-filelock/compare/3.10.7...3.12.0) --- updated-dependencies: - dependency-name: filelock dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * chore(deps): bump attrs from 22.2.0 to 23.1.0 in /requirements (#5212) Bumps [attrs](https://github.com/python-attrs/attrs) from 22.2.0 to 23.1.0. - [Release notes](https://github.com/python-attrs/attrs/releases) - [Changelog](https://github.com/python-attrs/attrs/blob/main/CHANGELOG.md) - [Commits](https://github.com/python-attrs/attrs/compare/22.2.0...23.1.0) --- updated-dependencies: - dependency-name: attrs dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * feat: update SAM CLI with latest App Templates commit hash (#5211) * feat: updating app templates repo hash with (a34f563f067e13df3eb350d36461b99397b6cda6) * dummy change to trigger checks * revert dummy commit --------- Co-authored-by: GitHub Action Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> * fix: fix failing Terraform integration test cases (#5218) * fix: fix the failing terraform integration test cases * fix: fix the resource address while accessing the module config resources * fix: fix checking the experimental log integration test cases * chore: bump version to 1.85.0 (#5226) * chore: use the SAR Application created in testing accounts (#5221) * chore: update aws_lambda_builders to 1.32.0 (#5215) Co-authored-by: GitHub Action Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> * feat: Added linking Gateway Method to Lambda Authorizer (#5228) * Added linking method to authorizer * Fixed docstring spelling mistake --------- Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> * feat: Return early during linking if no destination resources are found (#5220) * Returns during linking if no destination resources are found * Updated comment to correctly reflect state * Cleaned extra word --------- Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> * chore: Strengthen wording on "no Auth" during deploy (#5231) Co-authored-by: Jacob Fuss Co-authored-by: Sriram Madapusi Vasudevan <3770774+sriram-mv@users.noreply.github.com> * feat: Link Lambda Authorizer to Rest API (#5219) * Link RestApiId property for Lambda Authorizers * Updated docstring * Format files --------- Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> * feat: updating app templates repo hash with (9ee7db342025a42023882960b23ebfcde1d87422) (#5242) Co-authored-by: GitHub Action * fix: handle edge cases with function sync flow in sam sync command (#5222) * fix: handle special cases for function sync flow * update with unit tests * add integration tests * set ADL to false * fix update file methods * address comments * address comments to instantiate FunctionBuildInfo in the beginning * chore: Upgrade Mac installer to Py3.11 (#5223) * chore: Upgrade Mac installer to Py3.11 * Remove python in mac installer build process * Update hardcoded python version in build-mac.sh --------- Co-authored-by: Jacob Fuss * feat: updating app templates repo hash with (66f4a230d1c939a0c3f7b5647710c694c3a486f7) (#5245) Co-authored-by: GitHub Action * Revert "chore: Upgrade Mac installer to Py3.11 (#5223)" (#5252) This reverts commit 5954042d0bced7fea329c06930f021915ed9b746. * fix: add 3.11 to classifiers and upgrade Docker (#5225) * fix: add 3.11 to classifiers - update dependencies, need to nail down the versions. * Pin dev dependencies and handle excluding folders for mypy * Remove unneeded type: ignores * Fix name-match mypy errors * Fix empty-body error from mypy * Fix mypy errors by ignoring and get pytest to run/pass * Force mypy to not fail hopefully * Remove unneeded assignment * Update pinned requirements file --------- Co-authored-by: Jacob Fuss Co-authored-by: Jacob Fuss <32497805+jfuss@users.noreply.github.com> * fix: fix build and deploy SAR integration test cases (#5244) * fix: fix build SAR integration test cases * add comments to the UpdatableSARTemplate class usage. 
* fix black check * chore(deps): bump markupsafe from 2.1.2 to 2.1.3 in /requirements (#5257) Bumps [markupsafe](https://github.com/pallets/markupsafe) from 2.1.2 to 2.1.3. - [Release notes](https://github.com/pallets/markupsafe/releases) - [Changelog](https://github.com/pallets/markupsafe/blob/main/CHANGES.rst) - [Commits](https://github.com/pallets/markupsafe/compare/2.1.2...2.1.3) --- updated-dependencies: - dependency-name: markupsafe dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * chore(deps): bump pydantic from 1.10.7 to 1.10.8 in /requirements (#5258) Bumps [pydantic](https://github.com/pydantic/pydantic) from 1.10.7 to 1.10.8. - [Release notes](https://github.com/pydantic/pydantic/releases) - [Changelog](https://github.com/pydantic/pydantic/blob/v1.10.8/HISTORY.md) - [Commits](https://github.com/pydantic/pydantic/compare/v1.10.7...v1.10.8) --- updated-dependencies: - dependency-name: pydantic dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * feat: Add click command for cloud invoke command (#5238) * Add custom click option for cloud invoke called parameter * Added more error handling to executors and updated output-format enum to use auto * Add new CLI command for cloud invoke * Update samcli/commands/remote_invoke/invoke/cli.py Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * Update samcli/commands/remote_invoke/invoke/cli.py Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * Update samcli/commands/remote_invoke/cloud.py Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * Update samcli/cli/types.py Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * Address feedback * Moved all command options to be handled by click configuration * Updated validation function doc-string * Updated debug logs in types.py * Changed remote_invoke dir to cloud and updated log level for validation * Address feedback --------- Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * chore(deps-dev): bump boto3-stubs[apigateway,cloudformation,ecr,iam,lambda,s3,schemas,secretsmanager,signer,stepfunctions,sts,xray] (#5256) Bumps [boto3-stubs[apigateway,cloudformation,ecr,iam,lambda,s3,schemas,secretsmanager,signer,stepfunctions,sts,xray]](https://github.com/youtype/mypy_boto3_builder) from 1.26.131 to 1.26.146. - [Release notes](https://github.com/youtype/mypy_boto3_builder/releases) - [Commits](https://github.com/youtype/mypy_boto3_builder/commits) --- updated-dependencies: - dependency-name: boto3-stubs[apigateway,cloudformation,ecr,iam,lambda,s3,schemas,secretsmanager,signer,stepfunctions,sts,xray] dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * pin pytest-metadata to avoid its breaking change (#5261) * chore: update aws_lambda_builders to 1.33.0 (#5262) Co-authored-by: GitHub Action Co-authored-by: Daniel Mil <84205762+mildaniel@users.noreply.github.com> * chore: Add python3.11 to canaries (#5263) * chore: Add python3.11 to canaries * Remove python3.9 * Artifact export for GraphQLApi (#5250) * Artifact export for GraphQLApi * format * docstrings * fix unit tests * fix mypy issues * improve search method signature * chore: bump version to 1.86.0 (#5266) * fix: add constant str for enums to support deepcopy operation (#5265) * fix: add constant str for enums to support deepcopy operation * add unit tests * formatting * update automated updates gha to force restart of status checks (#5269) * integration tests for graphql resource package (#5271) * Revert "fix: add 3.11 to classifiers and upgrade Docker (#5225)" This reverts commit b51d6617340853d891469ff7a4dcc5bb88175389. * chore: bump version to 1.86.1 * chore: Upgrade Docker-py/ Support Py3.11 for running tests (#5279) * fix: add 3.11 to classifiers and upgrade Docker (#5225) * fix: add 3.11 to classifiers - update dependencies, need to nail down the versions. * Pin dev dependencies and handle excluding folders for mypy * Remove unneeded type: ignores * Fix name-match mypy errors * Fix empty-body error from mypy * Fix mypy errors by ignoring and get pytest to run/pass * Force mypy to not fail hopefully * Remove unneeded assignment * Update pinned requirements file --------- Co-authored-by: Jacob Fuss Co-authored-by: Jacob Fuss <32497805+jfuss@users.noreply.github.com> * chore: Force version on docker and allow unit test to run when docker not running In order for the docker.from_env() not to fail when docker is not installed/running, we force the min version on client creation. This was the default behavior in 4.X of docker-py but not longer in the latest version. --------- Co-authored-by: Sriram Madapusi Vasudevan <3770774+sriram-mv@users.noreply.github.com> Co-authored-by: Jacob Fuss * test: GHA to Execute Test without Docker Running (#5290) * test: Test without Docker running * Add build test * Run install * Remove success condition * Add continue on error * Add continue on error * Separate tests * Fix test name * Require new test * Address comments * Attempt to parameterize for windows * Attempt to parameterize for windows * Attempt to parameterize for windows * Set samdev in environment * Move skip to top of test class * fix: remove ruby3.2 from preview runtimes (#5296) * fix: remove ruby3.2 from preview runtimes * update {} with set() * Fix: Force docker version to match 4.2's default version (#5305) Co-authored-by: Jacob Fuss * chore: cleanup appveyor definitions for not running jobs which is already run with GHA & add docker info/version commands (#5306) * chore: remove redundant tests and setup from appveyor definitions * add/update docker info and docker version commands * add 3.11 and macos to GHAs * add some explanations to Windows section * fix: Fix failing tests on Python3.11 (#5317) * chore(deps): bump cryptography from 39.0.2 to 41.0.0 in /requirements (#5251) * chore(deps): bump cryptography from 39.0.2 to 41.0.0 in /requirements Bumps [cryptography](https://github.com/pyca/cryptography) from 39.0.2 to 41.0.0. 
- [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/39.0.2...41.0.0) --- updated-dependencies: - dependency-name: cryptography dependency-type: direct:production ... Signed-off-by: dependabot[bot] * Bump pyopenssl version to support newer cryptography lib --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Lucas <12496191+lucashuy@users.noreply.github.com> Co-authored-by: Jacob Fuss <32497805+jfuss@users.noreply.github.com> Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * add sleep between close and reopen (#5320) * GraphQLApi support for `sam deploy` (#5294) * GraphQLApi support for `sam deploy` * unit tests and format fixes * fix: Update Arn parsing logic and fix some edge cases/bug fixes for remote invoke (#5295) * Fix some edge cases and bug fixes for remote invoke and update Arn parsing logic * Address feedback * Add unit test for s3 with no region/accoint_id provided * Renamed command to sam remote invoke * chore: update aws_lambda_builders to 1.34.0 (#5343) * chore: update aws_lambda_builders to 1.34.0 * Update base.txt --------- Co-authored-by: GitHub Action Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> * test: test building npm and Typescript projects using external manifest file. (#5283) * test: test building npm and Typescript projects using external manifest file. * fix mypy issues * remove node 12.x, and add the new node versions * run make format * chore(deps-dev): bump ruff from 0.0.261 to 0.0.272 in /requirements (#5337) Bumps [ruff](https://github.com/charliermarsh/ruff) from 0.0.261 to 0.0.272. - [Release notes](https://github.com/charliermarsh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/BREAKING_CHANGES.md) - [Commits](https://github.com/charliermarsh/ruff/compare/v0.0.261...v0.0.272) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * chore(deps-dev): bump pytest-cov from 4.0.0 to 4.1.0 in /requirements (#5335) Bumps [pytest-cov](https://github.com/pytest-dev/pytest-cov) from 4.0.0 to 4.1.0. - [Changelog](https://github.com/pytest-dev/pytest-cov/blob/master/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest-cov/compare/v4.0.0...v4.1.0) --- updated-dependencies: - dependency-name: pytest-cov dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * feat: add lambda streaming support for remote invoke (#5307) * feat: support response streaming with remote invoke * add invoker and mappers * Update output formatting of stream response * add unit tests * fix formatting * Add docs * address comments * formatting * move is_function_invoke_mode_response_stream into lambda invoke executors and add/update string constants * chore: bump version to 1.87.0 * Revert app templates gha (#5356) * Revert "add sleep between close and reopen (#5320)" This reverts commit 5be690c88d580cfeee7731f549c75ed7543f47c5. * Revert "update automated updates gha to force restart of status checks (#5269)" This reverts commit deb212bc21eda2be0290e9a30f296aa74331e6c3. 
* refactor: make remote invoke reactive to display results as soon as they are available (#5359) * refactor: make remote invoke reactive to display results as soon as they are available * addressed the comments * refactor init_clients in sam delete (#5360) * refactor init_clients in sam delete * remove unused line * use client_provider * fix broken tests * Update samcli/commands/delete/delete_context.py Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * add telemetry * fix format --------- Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * chore: update aws-sam-translator to 1.69.0 (#5370) Co-authored-by: GitHub Action * feat: sam remote invoke help text and UX fixes (#5366) * Improve remote invoke help text and fix some UX bugs * Updated help text for parameter option * Updated test class name * Updated test method name * Updated help text for output-format and event-file * Address feedback * Updated help text for parameter option * Changed --output-format name to output and the values to text/json * Handle empty event for lambda and read from stdin when - is passed for event-file * chore: temporary pin python version to 3.7.16 (#5384) * chore: temporary pin python version to 3.7.16 * fix github action syntax error * Updated cfn-lint to support ruby3.2 in validate (#5375) * Remove unneeded test cases (#5374) * Remove unneeded test cases * Removing the two integ test cases as there is already coverage in unit test for cases that no region is specified * feat: updating app templates repo hash with (67f28fd83477e0e15b394f995afb33b2053b4074) (#5362) Co-authored-by: GitHub Action Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * test: Integration tests for remote invoke on regular lambda functions (#5382) * Created base integ glass for remote invoke tests * Add integration tests for invoking lambda functions * make black * Moved tearDownClass to base class * Removed tearDown class from inherited classes and updated lambda fn timeout * Remove the check to skip appveyor tests on master branch * feat: Make remote invoke command available (#5381) * Enabled remote invoke command and updated docs link * Created base integ glass for remote invoke tests * Added end2end integ tests for remote invoke * make black * Moved tearDownClass to base class * Remove the check to skip appveyor tests on master branch * test: Remote invoke integration tests for response stream configured lambda functions (#5383) * Created base integ glass for remote invoke tests * Add integration tests for invoking response streaming lambda fns * make black * Moved tearDownClass to base class * Moved tearDownClass method to base class and removed architectures from template file * Remove the check to skip appveyor tests on master branch * chore: bump version to 1.88.0 (#5393) * chore: fix issues with appveyor ubuntu setup #5395 * chore: remove deprecated runtime dotnetcore3.1 (#5091) * chore: remove deprecated runtime dotnetcore3.1 * apply pr comments * fix(invoke): Write in UTF-8 string instead of bytes. (#5232) * fix(invoke): Write in UTF-8 string instead of bytes. It appears that we were using sys.stdout.buffer to support python2 and python3 at the same time. Switching to just write to sys.stdout allows us to write a utf-8 encoding string. When using sys.stdout.buffer, we can only write bytes and I couldn't get the correct UTF8 encoded string to print correctly. 
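A minimal sketch of the difference described above, assuming a UTF-8 capable stdout (illustrative only, not part of this change):

    import sys

    message = "résumé ✓"
    # Text stream: Python encodes the string using the stream's encoding.
    sys.stdout.write(message + "\n")
    # Byte stream: only bytes are accepted, so the string must be encoded first.
    sys.stdout.buffer.write((message + "\n").encode("utf-8"))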
* Fix ruff errors * Update log_streamer.py to remove encoding * More updates to make everything work better in general * Fix with ruff again * Explictingly write to stream for building images * More patching writes * More patching * Fix long line * Use mock over io.string * More fixing of tests * Assert mock instead of data directly * More small edits in test * Verify through calls instead of value * run make black * Fix when we flush to match pervious behavior and output * add integration tests * run make black --------- Co-authored-by: Jacob Fuss Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> * Revert "fix(invoke): Write in UTF-8 string instead of bytes. (#5232)" (#5401) This reverts commit 97104eac05c47aec1c7db62cb98cd050c7656d3d. * Add sanity check script and use it in pyinstaller GHA (#5400) * Add sanity check script and use it in pyinstaller GHA * set pipefail in sanity-check.sh * Make CI_OVERRIDE a global env var in the GHA workflow * setup go in GHA * disable telemetry * Update script to check binary existence and to fix an issue in go build * Resolve changes --------- Signed-off-by: dependabot[bot] Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> Co-authored-by: Lucas <12496191+lucashuy@users.noreply.github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: GitHub Action Co-authored-by: Jacob Fuss <32497805+jfuss@users.noreply.github.com> Co-authored-by: Jacob Fuss Co-authored-by: Sriram Madapusi Vasudevan <3770774+sriram-mv@users.noreply.github.com> Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Co-authored-by: Wing Fung Lau <4760060+hawflau@users.noreply.github.com> Co-authored-by: hnnasit <84355507+hnnasit@users.noreply.github.com> Co-authored-by: Daniel Mil <84205762+mildaniel@users.noreply.github.com> Co-authored-by: Slava Senchenko Co-authored-by: Leonardo Gama * Disable JSON file extension support (#5426) Co-authored-by: Leonardo Gama * feat: Repair and refine tests (#5431) * Fix failing integration test * Add FileManager check for array param --------- Co-authored-by: Leonardo Gama * Fixed merge error --------- Signed-off-by: dependabot[bot] Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Co-authored-by: aws-sam-cli-bot <46753707+aws-sam-cli-bot@users.noreply.github.com> Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: GitHub Action Co-authored-by: Jacob Fuss <32497805+jfuss@users.noreply.github.com> Co-authored-by: Jacob Fuss Co-authored-by: Sriram Madapusi Vasudevan <3770774+sriram-mv@users.noreply.github.com> Co-authored-by: Wing Fung Lau <4760060+hawflau@users.noreply.github.com> Co-authored-by: hnnasit <84355507+hnnasit@users.noreply.github.com> Co-authored-by: Daniel Mil <84205762+mildaniel@users.noreply.github.com> Co-authored-by: Slava Senchenko Co-authored-by: Stephen Liedig Co-authored-by: Haresh Nasit Co-authored-by: Elvis Henrique Pereira Co-authored-by: Leonardo Gama <51037424+Leo10Gama@users.noreply.github.com> Co-authored-by: Leonardo Gama * Revert Python version pinning * Moved to using constants in the click arguments for config-file and config-env --------- Signed-off-by: 
dependabot[bot] Co-authored-by: Leonardo Gama <51037424+Leo10Gama@users.noreply.github.com> Co-authored-by: Leonardo Gama Co-authored-by: Mohamed Elasmar <71043312+moelasmar@users.noreply.github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: GitHub Action Co-authored-by: Jacob Fuss <32497805+jfuss@users.noreply.github.com> Co-authored-by: Jacob Fuss Co-authored-by: Sriram Madapusi Vasudevan <3770774+sriram-mv@users.noreply.github.com> Co-authored-by: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Co-authored-by: Wing Fung Lau <4760060+hawflau@users.noreply.github.com> Co-authored-by: hnnasit <84355507+hnnasit@users.noreply.github.com> Co-authored-by: Daniel Mil <84205762+mildaniel@users.noreply.github.com> Co-authored-by: Slava Senchenko Co-authored-by: aws-sam-cli-bot <46753707+aws-sam-cli-bot@users.noreply.github.com> Co-authored-by: Stephen Liedig Co-authored-by: Haresh Nasit Co-authored-by: Elvis Henrique Pereira --- samcli/cli/cli_config_file.py | 194 +++++++--- .../_utils/custom_options/hook_name_option.py | 2 +- samcli/commands/build/command.py | 4 +- samcli/commands/delete/delete_context.py | 6 +- samcli/commands/deploy/command.py | 4 +- samcli/commands/deploy/guided_config.py | 18 +- samcli/commands/init/command.py | 4 +- samcli/commands/list/endpoints/command.py | 4 +- samcli/commands/list/resources/command.py | 4 +- samcli/commands/list/stack_outputs/command.py | 4 +- .../local/generate_event/event_generation.py | 4 +- samcli/commands/local/invoke/cli.py | 4 +- samcli/commands/local/start_api/cli.py | 4 +- samcli/commands/local/start_lambda/cli.py | 4 +- samcli/commands/logs/command.py | 4 +- samcli/commands/package/command.py | 4 +- samcli/commands/pipeline/bootstrap/cli.py | 4 +- samcli/commands/pipeline/init/cli.py | 4 +- samcli/commands/publish/command.py | 4 +- samcli/commands/remote/invoke/cli.py | 4 +- samcli/commands/sync/command.py | 4 +- samcli/commands/traces/command.py | 4 +- samcli/commands/validate/validate.py | 4 +- samcli/lib/config/exceptions.py | 10 +- samcli/lib/config/file_manager.py | 342 ++++++++++++++++++ samcli/lib/config/samconfig.py | 133 ++++--- samcli/lib/telemetry/event.py | 3 + .../integration/buildcmd/build_integ_base.py | 4 + .../buildcmd/test_build_samconfig.py | 114 ++++++ tests/integration/deploy/deploy_integ_base.py | 25 ++ .../integration/deploy/test_deploy_command.py | 43 ++- .../telemetry/test_experimental_metric.py | 7 +- .../telemetry/test_installed_metric.py | 4 +- .../telemetry/test_telemetry_contract.py | 6 +- .../buildcmd/samconfig/samconfig.json | 12 + .../buildcmd/samconfig/samconfig.toml | 5 + .../buildcmd/samconfig/samconfig.yaml | 7 + .../testdata/buildcmd/samconfig/samconfig.yml | 7 + .../testdata/buildcmd/samconfig/template.yaml | 37 ++ tests/unit/cli/test_cli_config_file.py | 37 +- .../commands/delete/test_delete_context.py | 8 +- .../unit/commands/samconfig/test_samconfig.py | 2 +- tests/unit/lib/samconfig/test_file_manager.py | 277 ++++++++++++++ tests/unit/lib/samconfig/test_samconfig.py | 112 +++++- 44 files changed, 1288 insertions(+), 203 deletions(-) create mode 100644 samcli/lib/config/file_manager.py create mode 100644 tests/integration/buildcmd/test_build_samconfig.py create mode 100644 tests/integration/testdata/buildcmd/samconfig/samconfig.json create mode 100644 tests/integration/testdata/buildcmd/samconfig/samconfig.toml create mode 100644 
tests/integration/testdata/buildcmd/samconfig/samconfig.yaml create mode 100644 tests/integration/testdata/buildcmd/samconfig/samconfig.yml create mode 100644 tests/integration/testdata/buildcmd/samconfig/template.yaml create mode 100644 tests/unit/lib/samconfig/test_file_manager.py diff --git a/samcli/cli/cli_config_file.py b/samcli/cli/cli_config_file.py index e4606e4555..bfc295c01b 100644 --- a/samcli/cli/cli_config_file.py +++ b/samcli/cli/cli_config_file.py @@ -10,49 +10,67 @@ import logging import os from pathlib import Path +from typing import Any, Callable, Dict, List, Optional import click +from click.core import ParameterSource from samcli.cli.context import get_cmd_names from samcli.commands.exceptions import ConfigException from samcli.lib.config.samconfig import DEFAULT_CONFIG_FILE_NAME, DEFAULT_ENV, SamConfig -__all__ = ("TomlProvider", "configuration_option", "get_ctx_defaults") +__all__ = ("ConfigProvider", "configuration_option", "get_ctx_defaults") LOG = logging.getLogger(__name__) -class TomlProvider: +class ConfigProvider: """ - A parser for toml configuration files + A parser for sam configuration files """ def __init__(self, section=None, cmd_names=None): """ - The constructor for TomlProvider class - :param section: section defined in the configuration file nested within `cmd` - :param cmd_names: cmd_name defined in the configuration file + The constructor for ConfigProvider class + + Parameters + ---------- + section + The section defined in the configuration file nested within `cmd` + cmd_names + The cmd_name defined in the configuration file """ self.section = section self.cmd_names = cmd_names - def __call__(self, config_path, config_env, cmd_names): + def __call__(self, config_path: Path, config_env: str, cmd_names: List[str]) -> dict: """ Get resolved config based on the `file_path` for the configuration file, `config_env` targeted inside the config file and corresponding `cmd_name` as denoted by `click`. - :param config_path: The path of configuration file. - :param config_env: The name of the sectional config_env within configuration file. - :param list cmd_names: sam command name as defined by click - :returns dictionary containing the configuration parameters under specified config_env + Parameters + ---------- + config_path: Path + The path of configuration file. + config_env: str + The name of the sectional config_env within configuration file. + cmd_names: List[str] + The sam command name as defined by click. + + Returns + ------- + dict + A dictionary containing the configuration parameters under specified config_env. """ - resolved_config = {} + resolved_config: dict = {} # Use default sam config file name if config_path only contain the directory config_file_path = ( - Path(os.path.abspath(config_path)) if config_path else Path(os.getcwd(), DEFAULT_CONFIG_FILE_NAME) + Path(os.path.abspath(config_path)) + if config_path + else Path(os.getcwd(), SamConfig.get_default_file(os.getcwd())) ) config_file_name = config_file_path.name config_file_dir = config_file_path.parents[0] @@ -105,32 +123,56 @@ def __call__(self, config_path, config_env, cmd_names): return resolved_config -def configuration_callback(cmd_name, option_name, saved_callback, provider, ctx, param, value): +def configuration_callback( + cmd_name: str, + option_name: str, + saved_callback: Optional[Callable], + provider: Callable, + ctx: click.Context, + param: click.Parameter, + value, +): """ Callback for reading the config file. 
Also takes care of calling user specified custom callback afterwards. - :param cmd_name: `sam` command name derived from click. - :param option_name: The name of the option. This is used for error messages. - :param saved_callback: User-specified callback to be called later. - :param provider: A callable that parses the configuration file and returns a dictionary + Parameters + ---------- + cmd_name: str + The `sam` command name derived from click. + option_name: str + The name of the option. This is used for error messages. + saved_callback: Optional[Callable] + User-specified callback to be called later. + provider: Callable + A callable that parses the configuration file and returns a dictionary of the configuration parameters. Will be called as `provider(file_path, config_env, cmd_name)`. - :param ctx: Click context - :param param: Click parameter - :param value: Specified value for config_env - :returns specified callback or the specified value for config_env. + ctx: click.Context + Click context + param: click.Parameter + Click parameter + value + Specified value for config_env + + Returns + ------- + The specified callback or the specified value for config_env. """ # ctx, param and value are default arguments for click specified callbacks. ctx.default_map = ctx.default_map or {} - cmd_name = cmd_name or ctx.info_name + cmd_name = cmd_name or str(ctx.info_name) param.default = None config_env_name = ctx.params.get("config_env") or DEFAULT_ENV - config_file = ctx.params.get("config_file") or DEFAULT_CONFIG_FILE_NAME config_dir = getattr(ctx, "samconfig_dir", None) or os.getcwd() + config_file = ( # If given by default, check for other `samconfig` extensions first. Else use user-provided value + SamConfig.get_default_file(config_dir=config_dir) + if getattr(ctx.get_parameter_source("config_file"), "name", "") == ParameterSource.DEFAULT.name + else ctx.params.get("config_file") or SamConfig.get_default_file(config_dir=config_dir) + ) # If --config-file is an absolute path, use it, if not, start from config_dir config_file_path = config_file if os.path.isabs(config_file) else os.path.join(config_dir, config_file) if ( @@ -154,21 +196,35 @@ def configuration_callback(cmd_name, option_name, saved_callback, provider, ctx, return saved_callback(ctx, param, config_env_name) if saved_callback else config_env_name -def get_ctx_defaults(cmd_name, provider, ctx, config_env_name, config_file=None): +def get_ctx_defaults( + cmd_name: str, provider: Callable, ctx: click.Context, config_env_name: str, config_file: Optional[str] = None +) -> Any: """ Get the set of the parameters that are needed to be set into the click command. + This function also figures out the command name by looking up current click context's parent and constructing the parsed command name that is used in default configuration file. If a given cmd_name is start-api, the parsed name is "local_start_api". provider is called with `config_file`, `config_env_name` and `parsed_cmd_name`. - :param cmd_name: `sam` command name - :param provider: provider to be called for reading configuration file - :param ctx: Click context - :param config_env_name: config-env within configuration file, sam configuration file will be relative to the - supplied original template if its path is not specified - :param config_file: configuration file name - :return: dictionary of defaults for parameters + Parameters + ---------- + cmd_name: str + The `sam` command name. + provider: Callable + The provider to be called for reading configuration file. 
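A condensed sketch of the --config-file precedence this callback implements (assuming SamConfig.get_default_file, defined in samconfig.py and not shown in these hunks, returns the first existing samconfig.* variant in the given directory):

    from click.core import ParameterSource

    from samcli.lib.config.samconfig import SamConfig


    def pick_config_file(ctx, config_dir):
        # --config-file left at its click default: prefer whichever supported
        # samconfig file (toml/yaml/yml/json) already exists in config_dir.
        if getattr(ctx.get_parameter_source("config_file"), "name", "") == ParameterSource.DEFAULT.name:
            return SamConfig.get_default_file(config_dir=config_dir)
        # Otherwise honour the user-supplied value, falling back to the default lookup.
        return ctx.params.get("config_file") or SamConfig.get_default_file(config_dir=config_dir)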
+ ctx: click.Context + Click context + config_env_name: str + The config-env within configuration file, sam configuration file will be relative to the + supplied original template if its path is not specified. + config_file: Optional[str] + The configuration file name. + + Returns + ------- + Any + A dictionary of defaults for parameters. """ return provider(config_file, config_env_name, get_cmd_names(cmd_name, ctx)) @@ -180,30 +236,38 @@ def configuration_option(*param_decls, **attrs): """ Adds configuration file support to a click application. - NOTE: This decorator should be added to the top of parameter chain, right below click.command, before - any options are declared. - - Example: - >>> @click.command("hello") - @configuration_option(provider=TomlProvider(section="parameters")) - @click.option('--name', type=click.String) - def hello(name): - print("Hello " + name) - This will create a hidden click option whose callback function loads configuration parameters from default configuration environment [default] in default configuration file [samconfig.toml] in the template file directory. - :param preconfig_decorator_list: A list of click option decorator which need to place before this function. For - exmple, if we want to add option "--config-file" and "--config-env" to allow customized configuration file + + Note + ---- + This decorator should be added to the top of parameter chain, right below click.command, before + any options are declared. + + Example + ------- + >>> @click.command("hello") + @configuration_option(provider=ConfigProvider(section="parameters")) + @click.option('--name', type=click.String) + def hello(name): + print("Hello " + name) + + Parameters + ---------- + preconfig_decorator_list: list + A list of click option decorator which need to place before this function. For + example, if we want to add option "--config-file" and "--config-env" to allow customized configuration file and configuration environment, we will use configuration_option as below: @configuration_option( preconfig_decorator_list=[decorator_customize_config_file, decorator_customize_config_env], - provider=TomlProvider(section=CONFIG_SECTION), + provider=ConfigProvider(section=CONFIG_SECTION), ) By default, we enable these two options. - :param provider: A callable that parses the configuration file and returns a dictionary + provider: Callable + A callable that parses the configuration file and returns a dictionary of the configuration parameters. Will be called as - `provider(file_path, config_env, cmd_name) + `provider(file_path, config_env, cmd_name)` """ def decorator_configuration_setup(f): @@ -240,17 +304,25 @@ def decorator(f): return composed_decorator(decorator_list) -def decorator_customize_config_file(f): +def decorator_customize_config_file(f: Callable) -> Callable: """ CLI option to customize configuration file name. By default it is 'samconfig.toml' in project directory. Ex: --config-file samconfig.toml - :param f: Callback function passed by Click - :return: Callback function + + Parameters + ---------- + f: Callable + Callback function passed by Click + + Returns + ------- + Callable + A Callback function """ - config_file_attrs = {} + config_file_attrs: Dict[str, Any] = {} config_file_param_decls = ("--config-file",) config_file_attrs["help"] = "Configuration file containing default parameter values." 
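For reference, a hypothetical command that wires the two customization decorators together with ConfigProvider might look like the sketch below (the exposed config_file/config_env parameters follow the decorators defined in this file; this is illustrative only, not part of the patch):

    import click

    from samcli.cli.cli_config_file import (
        ConfigProvider,
        configuration_option,
        decorator_customize_config_env,
        decorator_customize_config_file,
    )


    @click.command("hello")
    @configuration_option(
        preconfig_decorator_list=[decorator_customize_config_file, decorator_customize_config_env],
        provider=ConfigProvider(section="parameters"),
    )
    @click.option("--name")
    def hello(config_file, config_env, name):
        click.echo(f"Hello {name}")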
- config_file_attrs["default"] = "samconfig.toml" + config_file_attrs["default"] = DEFAULT_CONFIG_FILE_NAME config_file_attrs["show_default"] = True config_file_attrs["is_eager"] = True config_file_attrs["required"] = False @@ -258,17 +330,25 @@ def decorator_customize_config_file(f): return click.option(*config_file_param_decls, **config_file_attrs)(f) -def decorator_customize_config_env(f): +def decorator_customize_config_env(f: Callable) -> Callable: """ CLI option to customize configuration environment name. By default it is 'default'. Ex: --config-env default - :param f: Callback function passed by Click - :return: Callback function + + Parameters + ---------- + f: Callable + Callback function passed by Click + + Returns + ------- + Callable + A Callback function """ - config_env_attrs = {} + config_env_attrs: Dict[str, Any] = {} config_env_param_decls = ("--config-env",) config_env_attrs["help"] = "Environment name specifying default parameter values in the configuration file." - config_env_attrs["default"] = "default" + config_env_attrs["default"] = DEFAULT_ENV config_env_attrs["show_default"] = True config_env_attrs["is_eager"] = True config_env_attrs["required"] = False diff --git a/samcli/commands/_utils/custom_options/hook_name_option.py b/samcli/commands/_utils/custom_options/hook_name_option.py index 745ab6c64a..a2cb334157 100644 --- a/samcli/commands/_utils/custom_options/hook_name_option.py +++ b/samcli/commands/_utils/custom_options/hook_name_option.py @@ -141,7 +141,7 @@ def _get_customer_input_beta_features_option(default_map, experimental_entry, op if beta_features is not None: return beta_features - # Get the beta-features flag value from the SamConfig toml file if provided. + # Get the beta-features flag value from the SamConfig file if provided. 
beta_features = default_map.get("beta_features") if beta_features is not None: return beta_features diff --git a/samcli/commands/build/command.py b/samcli/commands/build/command.py index a60f9954e1..86327d411d 100644 --- a/samcli/commands/build/command.py +++ b/samcli/commands/build/command.py @@ -27,7 +27,7 @@ from samcli.cli.main import pass_context, common_options as cli_framework_options, aws_creds_options, print_cmdline_args from samcli.commands.build.core.command import BuildCommand from samcli.lib.telemetry.metric import track_command -from samcli.cli.cli_config_file import configuration_option, TomlProvider +from samcli.cli.cli_config_file import configuration_option, ConfigProvider from samcli.lib.utils.version_checker import check_newer_version from samcli.commands.build.click_container import ContainerOptions from samcli.commands.build.utils import MountMode @@ -69,7 +69,7 @@ short_help=HELP_TEXT, context_settings={"max_content_width": 120}, ) -@configuration_option(provider=TomlProvider(section="parameters")) +@configuration_option(provider=ConfigProvider(section="parameters")) @hook_name_click_option( force_prepare=True, invalid_coexist_options=["t", "template-file", "template", "parameter-overrides"], diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py index 08d327eceb..1424c87f4a 100644 --- a/samcli/commands/delete/delete_context.py +++ b/samcli/commands/delete/delete_context.py @@ -9,7 +9,7 @@ from botocore.exceptions import NoCredentialsError, NoRegionError from click import confirm, prompt -from samcli.cli.cli_config_file import TomlProvider +from samcli.cli.cli_config_file import ConfigProvider from samcli.commands.delete.exceptions import CfDeleteFailedStatusError from samcli.commands.exceptions import AWSServiceClientError, RegionError from samcli.lib.bootstrap.companion_stack.companion_stack_builder import CompanionStack @@ -82,8 +82,8 @@ def parse_config_file(self): """ Read the provided config file if it exists and assign the options values. 
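As a sketch of the call pattern used below, ConfigProvider can also be invoked directly (placeholder values; the delete command supplies its own section and command name):

    from samcli.cli.cli_config_file import ConfigProvider

    provider = ConfigProvider(section="parameters", cmd_names=["delete"])
    config_options = provider(
        config_path="samconfig.toml",  # path to the config file, or None for the default lookup
        config_env="default",
        cmd_names=["delete"],
    )
    # An empty dict is returned when the requested env/command/section is absent.
    print(config_options)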
""" - toml_provider = TomlProvider(CONFIG_SECTION, [CONFIG_COMMAND]) - config_options = toml_provider( + config_provider = ConfigProvider(CONFIG_SECTION, [CONFIG_COMMAND]) + config_options = config_provider( config_path=self.config_file, config_env=self.config_env, cmd_names=[CONFIG_COMMAND] ) if not config_options: diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py index aeefe4d25f..557a601261 100644 --- a/samcli/commands/deploy/command.py +++ b/samcli/commands/deploy/command.py @@ -6,7 +6,7 @@ import click -from samcli.cli.cli_config_file import TomlProvider, configuration_option +from samcli.cli.cli_config_file import ConfigProvider, configuration_option from samcli.cli.main import aws_creds_options, common_options, pass_context, print_cmdline_args from samcli.commands._utils.cdk_support_decorators import unsupported_command_cdk from samcli.commands._utils.click_mutex import ClickMutex @@ -75,7 +75,7 @@ description=DESCRIPTION, requires_credentials=True, ) -@configuration_option(provider=TomlProvider(section=CONFIG_SECTION)) +@configuration_option(provider=ConfigProvider(section=CONFIG_SECTION)) @click.option( "--guided", "-g", diff --git a/samcli/commands/deploy/guided_config.py b/samcli/commands/deploy/guided_config.py index b9d8ea59b5..78866944cd 100644 --- a/samcli/commands/deploy/guided_config.py +++ b/samcli/commands/deploy/guided_config.py @@ -7,7 +7,8 @@ from samcli.cli.context import get_cmd_names from samcli.commands.deploy.exceptions import GuidedDeployFailedError -from samcli.lib.config.samconfig import DEFAULT_CONFIG_FILE_NAME, DEFAULT_ENV, SamConfig +from samcli.lib.config.exceptions import SamConfigFileReadException +from samcli.lib.config.samconfig import DEFAULT_ENV, SamConfig class GuidedConfig: @@ -19,20 +20,25 @@ def get_config_ctx(self, config_file=None): ctx = click.get_current_context() samconfig_dir = getattr(ctx, "samconfig_dir", None) + config_dir = samconfig_dir if samconfig_dir else SamConfig.config_dir(template_file_path=self.template_file) samconfig = SamConfig( - config_dir=samconfig_dir if samconfig_dir else SamConfig.config_dir(template_file_path=self.template_file), - filename=config_file or DEFAULT_CONFIG_FILE_NAME, + config_dir=config_dir, + filename=config_file or SamConfig.get_default_file(config_dir=config_dir), ) return ctx, samconfig def read_config_showcase(self, config_file=None): - _, samconfig = self.get_config_ctx(config_file) - - status = "Found" if samconfig.exists() else "Not found" msg = ( "Syntax invalid in samconfig.toml; save values " "through sam deploy --guided to overwrite file with a valid set of values." 
) + try: + _, samconfig = self.get_config_ctx(config_file) + except SamConfigFileReadException: + raise GuidedDeployFailedError(msg) + + status = "Found" if samconfig.exists() else "Not found" + config_sanity = samconfig.sanity_check() click.secho("\nConfiguring SAM deploy\n======================", fg="yellow") click.echo(f"\n\tLooking for config file [{config_file}] : {status}") diff --git a/samcli/commands/init/command.py b/samcli/commands/init/command.py index 8702b3f9a9..f31e967a04 100644 --- a/samcli/commands/init/command.py +++ b/samcli/commands/init/command.py @@ -7,7 +7,7 @@ import click -from samcli.cli.cli_config_file import TomlProvider, configuration_option +from samcli.cli.cli_config_file import ConfigProvider, configuration_option from samcli.cli.main import common_options, pass_context, print_cmdline_args from samcli.commands._utils.click_mutex import ClickMutex from samcli.commands.init.core.command import InitCommand @@ -112,7 +112,7 @@ def wrapped(*args, **kwargs): description=DESCRIPTION, requires_credentials=False, ) -@configuration_option(provider=TomlProvider(section="parameters")) +@configuration_option(provider=ConfigProvider(section="parameters")) @click.option( "--no-interactive", is_flag=True, diff --git a/samcli/commands/list/endpoints/command.py b/samcli/commands/list/endpoints/command.py index 6de1c41bb2..f11543ef8c 100644 --- a/samcli/commands/list/endpoints/command.py +++ b/samcli/commands/list/endpoints/command.py @@ -4,7 +4,7 @@ import click -from samcli.cli.cli_config_file import TomlProvider, configuration_option +from samcli.cli.cli_config_file import ConfigProvider, configuration_option from samcli.cli.main import aws_creds_options, common_options, pass_context, print_cmdline_args from samcli.commands._utils.command_exception_handler import command_exception_handler from samcli.commands._utils.options import parameter_override_option, template_option_without_build @@ -21,7 +21,7 @@ @click.command(name="endpoints", help=HELP_TEXT) -@configuration_option(provider=TomlProvider(section="parameters")) +@configuration_option(provider=ConfigProvider(section="parameters")) @parameter_override_option @stack_name_option @output_option diff --git a/samcli/commands/list/resources/command.py b/samcli/commands/list/resources/command.py index 5dd2b41034..dacfac30e2 100644 --- a/samcli/commands/list/resources/command.py +++ b/samcli/commands/list/resources/command.py @@ -4,7 +4,7 @@ import click -from samcli.cli.cli_config_file import TomlProvider, configuration_option +from samcli.cli.cli_config_file import ConfigProvider, configuration_option from samcli.cli.main import aws_creds_options, common_options, pass_context, print_cmdline_args from samcli.commands._utils.command_exception_handler import command_exception_handler from samcli.commands._utils.options import parameter_override_option, template_option_without_build @@ -20,7 +20,7 @@ @click.command(name="resources", help=HELP_TEXT) -@configuration_option(provider=TomlProvider(section="parameters")) +@configuration_option(provider=ConfigProvider(section="parameters")) @parameter_override_option @stack_name_option @output_option diff --git a/samcli/commands/list/stack_outputs/command.py b/samcli/commands/list/stack_outputs/command.py index 3800c009b2..e988f98045 100644 --- a/samcli/commands/list/stack_outputs/command.py +++ b/samcli/commands/list/stack_outputs/command.py @@ -4,7 +4,7 @@ import click -from samcli.cli.cli_config_file import TomlProvider, configuration_option +from samcli.cli.cli_config_file import 
ConfigProvider, configuration_option from samcli.cli.main import aws_creds_options, common_options, pass_context, print_cmdline_args from samcli.commands._utils.command_exception_handler import command_exception_handler from samcli.commands.list.cli_common.options import output_option @@ -23,7 +23,7 @@ required=True, type=click.STRING, ) -@configuration_option(provider=TomlProvider(section="parameters")) +@configuration_option(provider=ConfigProvider(section="parameters")) @output_option @aws_creds_options @common_options diff --git a/samcli/commands/local/generate_event/event_generation.py b/samcli/commands/local/generate_event/event_generation.py index 5715bded73..9bf8e49e7a 100644 --- a/samcli/commands/local/generate_event/event_generation.py +++ b/samcli/commands/local/generate_event/event_generation.py @@ -6,7 +6,7 @@ import click -from samcli.cli.cli_config_file import TomlProvider, configuration_option +from samcli.cli.cli_config_file import ConfigProvider, configuration_option from samcli.cli.options import debug_option from samcli.lib.generated_sample_events import events from samcli.lib.telemetry.metric import track_command @@ -160,7 +160,7 @@ def get_command(self, ctx, cmd_name): callback=command_callback, ) - cmd = configuration_option(provider=TomlProvider(section="parameters"))(debug_option(cmd)) + cmd = configuration_option(provider=ConfigProvider(section="parameters"))(debug_option(cmd)) return cmd def list_commands(self, ctx): diff --git a/samcli/commands/local/invoke/cli.py b/samcli/commands/local/invoke/cli.py index a9a3fc9571..0442e7b7ed 100644 --- a/samcli/commands/local/invoke/cli.py +++ b/samcli/commands/local/invoke/cli.py @@ -6,7 +6,7 @@ import click -from samcli.cli.cli_config_file import TomlProvider, configuration_option +from samcli.cli.cli_config_file import ConfigProvider, configuration_option from samcli.cli.main import aws_creds_options, pass_context, print_cmdline_args from samcli.cli.main import common_options as cli_framework_options from samcli.commands._utils.experimental import ExperimentalFlag, is_experimental_enabled @@ -43,7 +43,7 @@ short_help=HELP_TEXT, context_settings={"max_content_width": 120}, ) -@configuration_option(provider=TomlProvider(section="parameters")) +@configuration_option(provider=ConfigProvider(section="parameters")) @hook_name_click_option( force_prepare=False, invalid_coexist_options=["t", "template-file", "template", "parameter-overrides"] ) diff --git a/samcli/commands/local/start_api/cli.py b/samcli/commands/local/start_api/cli.py index 9de4d7982c..3e2f02b5f3 100644 --- a/samcli/commands/local/start_api/cli.py +++ b/samcli/commands/local/start_api/cli.py @@ -6,7 +6,7 @@ import click -from samcli.cli.cli_config_file import TomlProvider, configuration_option +from samcli.cli.cli_config_file import ConfigProvider, configuration_option from samcli.cli.main import aws_creds_options, pass_context, print_cmdline_args from samcli.cli.main import common_options as cli_framework_options from samcli.commands._utils.experimental import ExperimentalFlag, is_experimental_enabled @@ -58,7 +58,7 @@ requires_credentials=False, context_settings={"max_content_width": 120}, ) -@configuration_option(provider=TomlProvider(section="parameters")) +@configuration_option(provider=ConfigProvider(section="parameters")) @hook_name_click_option( force_prepare=False, invalid_coexist_options=["t", "template-file", "template", "parameter-overrides"] ) diff --git a/samcli/commands/local/start_lambda/cli.py b/samcli/commands/local/start_lambda/cli.py index 
9aaec50976..ded8b786fc 100644 --- a/samcli/commands/local/start_lambda/cli.py +++ b/samcli/commands/local/start_lambda/cli.py @@ -6,7 +6,7 @@ import click -from samcli.cli.cli_config_file import TomlProvider, configuration_option +from samcli.cli.cli_config_file import ConfigProvider, configuration_option from samcli.cli.main import aws_creds_options, pass_context, print_cmdline_args from samcli.cli.main import common_options as cli_framework_options from samcli.commands._utils.experimental import ExperimentalFlag, is_experimental_enabled @@ -52,7 +52,7 @@ requires_credentials=False, context_settings={"max_content_width": 120}, ) -@configuration_option(provider=TomlProvider(section="parameters")) +@configuration_option(provider=ConfigProvider(section="parameters")) @hook_name_click_option( force_prepare=False, invalid_coexist_options=["t", "template-file", "template", "parameter-overrides"] ) diff --git a/samcli/commands/logs/command.py b/samcli/commands/logs/command.py index 1146767a60..f8df636b59 100644 --- a/samcli/commands/logs/command.py +++ b/samcli/commands/logs/command.py @@ -6,7 +6,7 @@ import click -from samcli.cli.cli_config_file import TomlProvider, configuration_option +from samcli.cli.cli_config_file import ConfigProvider, configuration_option from samcli.cli.main import aws_creds_options, pass_context, print_cmdline_args from samcli.cli.main import common_options as cli_framework_options from samcli.commands._utils.command_exception_handler import command_exception_handler @@ -49,7 +49,7 @@ description=DESCRIPTION, requires_credentials=True, ) -@configuration_option(provider=TomlProvider(section="parameters")) +@configuration_option(provider=ConfigProvider(section="parameters")) @click.option( "--name", "-n", diff --git a/samcli/commands/package/command.py b/samcli/commands/package/command.py index 41fb10b133..151661c91e 100644 --- a/samcli/commands/package/command.py +++ b/samcli/commands/package/command.py @@ -3,7 +3,7 @@ """ import click -from samcli.cli.cli_config_file import TomlProvider, configuration_option +from samcli.cli.cli_config_file import ConfigProvider, configuration_option from samcli.cli.main import aws_creds_options, common_options, pass_context, print_cmdline_args from samcli.commands._utils.cdk_support_decorators import unsupported_command_cdk from samcli.commands._utils.command_exception_handler import command_exception_handler @@ -67,7 +67,7 @@ def resources_and_properties_help_string(): description=DESCRIPTION, requires_credentials=True, ) -@configuration_option(provider=TomlProvider(section="parameters")) +@configuration_option(provider=ConfigProvider(section="parameters")) @template_click_option(include_build=True) @click.option( "--output-template-file", diff --git a/samcli/commands/pipeline/bootstrap/cli.py b/samcli/commands/pipeline/bootstrap/cli.py index 9b7db02578..7ba17e43fe 100644 --- a/samcli/commands/pipeline/bootstrap/cli.py +++ b/samcli/commands/pipeline/bootstrap/cli.py @@ -7,7 +7,7 @@ import click -from samcli.cli.cli_config_file import TomlProvider, configuration_option +from samcli.cli.cli_config_file import ConfigProvider, configuration_option from samcli.cli.main import aws_creds_options, common_options, pass_context, print_cmdline_args from samcli.commands._utils.click_mutex import ClickMutex from samcli.commands._utils.command_exception_handler import command_exception_handler @@ -38,7 +38,7 @@ @click.command("bootstrap", short_help=SHORT_HELP, help=HELP_TEXT, context_settings=dict(max_content_width=120)) 
-@configuration_option(provider=TomlProvider(section="parameters")) +@configuration_option(provider=ConfigProvider(section="parameters")) @click.option( "--interactive/--no-interactive", is_flag=True, diff --git a/samcli/commands/pipeline/init/cli.py b/samcli/commands/pipeline/init/cli.py index 9e42f6e74b..ec675fb608 100644 --- a/samcli/commands/pipeline/init/cli.py +++ b/samcli/commands/pipeline/init/cli.py @@ -5,7 +5,7 @@ import click -from samcli.cli.cli_config_file import TomlProvider, configuration_option +from samcli.cli.cli_config_file import ConfigProvider, configuration_option from samcli.cli.main import common_options as cli_framework_options from samcli.cli.main import pass_context from samcli.commands._utils.command_exception_handler import command_exception_handler @@ -24,7 +24,7 @@ @click.command("init", help=HELP_TEXT, short_help=SHORT_HELP) -@configuration_option(provider=TomlProvider(section="parameters")) +@configuration_option(provider=ConfigProvider(section="parameters")) @click.option( "--bootstrap", is_flag=True, diff --git a/samcli/commands/publish/command.py b/samcli/commands/publish/command.py index 091a044192..4ebfb3e8e5 100644 --- a/samcli/commands/publish/command.py +++ b/samcli/commands/publish/command.py @@ -7,7 +7,7 @@ import click from serverlessrepo.publish import CREATE_APPLICATION -from samcli.cli.cli_config_file import TomlProvider, configuration_option +from samcli.cli.cli_config_file import ConfigProvider, configuration_option from samcli.cli.main import aws_creds_options, pass_context, print_cmdline_args from samcli.cli.main import common_options as cli_framework_options from samcli.commands._utils.command_exception_handler import command_exception_handler @@ -44,7 +44,7 @@ @click.command("publish", help=HELP_TEXT, short_help=SHORT_HELP) -@configuration_option(provider=TomlProvider(section="parameters")) +@configuration_option(provider=ConfigProvider(section="parameters")) @template_common_option @click.option("--semantic-version", help=SEMANTIC_VERSION_HELP) @aws_creds_options diff --git a/samcli/commands/remote/invoke/cli.py b/samcli/commands/remote/invoke/cli.py index 3f3a771ea1..0318566b4a 100644 --- a/samcli/commands/remote/invoke/cli.py +++ b/samcli/commands/remote/invoke/cli.py @@ -4,7 +4,7 @@ import click -from samcli.cli.cli_config_file import TomlProvider, configuration_option +from samcli.cli.cli_config_file import ConfigProvider, configuration_option from samcli.cli.context import Context from samcli.cli.main import aws_creds_options, common_options, pass_context, print_cmdline_args from samcli.cli.types import RemoteInvokeOutputFormatType @@ -42,7 +42,7 @@ requires_credentials=True, context_settings={"max_content_width": 120}, ) -@configuration_option(provider=TomlProvider(section="parameters")) +@configuration_option(provider=ConfigProvider(section="parameters")) @click.option("--stack-name", required=False, help="Name of the stack to get the resource information from") @click.argument("resource-id", required=False) @click.option( diff --git a/samcli/commands/sync/command.py b/samcli/commands/sync/command.py index 81f1222207..ddc5e2a165 100644 --- a/samcli/commands/sync/command.py +++ b/samcli/commands/sync/command.py @@ -5,7 +5,7 @@ import click -from samcli.cli.cli_config_file import TomlProvider, configuration_option +from samcli.cli.cli_config_file import ConfigProvider, configuration_option from samcli.cli.context import Context from samcli.cli.main import aws_creds_options, pass_context, print_cmdline_args from samcli.cli.main 
import common_options as cli_framework_options @@ -114,7 +114,7 @@ requires_credentials=True, context_settings={"max_content_width": 120}, ) -@configuration_option(provider=TomlProvider(section="parameters")) +@configuration_option(provider=ConfigProvider(section="parameters")) @template_option_without_build @click.option( "--code", diff --git a/samcli/commands/traces/command.py b/samcli/commands/traces/command.py index 183a2bd156..d82b6871ab 100644 --- a/samcli/commands/traces/command.py +++ b/samcli/commands/traces/command.py @@ -5,7 +5,7 @@ import click -from samcli.cli.cli_config_file import TomlProvider, configuration_option +from samcli.cli.cli_config_file import ConfigProvider, configuration_option from samcli.cli.main import aws_creds_options, pass_context, print_cmdline_args from samcli.cli.main import common_options as cli_framework_options from samcli.commands._utils.command_exception_handler import command_exception_handler @@ -28,7 +28,7 @@ @click.command("traces", help=HELP_TEXT, short_help="Fetch AWS X-Ray traces") -@configuration_option(provider=TomlProvider(section="parameters")) +@configuration_option(provider=ConfigProvider(section="parameters")) @click.option( "--trace-id", "-ti", diff --git a/samcli/commands/validate/validate.py b/samcli/commands/validate/validate.py index 1c5c2b28b1..221284b1ac 100644 --- a/samcli/commands/validate/validate.py +++ b/samcli/commands/validate/validate.py @@ -8,7 +8,7 @@ from botocore.exceptions import NoCredentialsError from samtranslator.translator.arn_generator import NoRegionFound -from samcli.cli.cli_config_file import TomlProvider, configuration_option +from samcli.cli.cli_config_file import ConfigProvider, configuration_option from samcli.cli.context import Context from samcli.cli.main import aws_creds_options, pass_context, print_cmdline_args from samcli.cli.main import common_options as cli_framework_options @@ -35,7 +35,7 @@ requires_credentials=False, context_settings={"max_content_width": 120}, ) -@configuration_option(provider=TomlProvider(section="parameters")) +@configuration_option(provider=ConfigProvider(section="parameters")) @template_option_without_build @aws_creds_options @cli_framework_options diff --git a/samcli/lib/config/exceptions.py b/samcli/lib/config/exceptions.py index 50297ce722..c179b4a13c 100644 --- a/samcli/lib/config/exceptions.py +++ b/samcli/lib/config/exceptions.py @@ -4,4 +4,12 @@ class SamConfigVersionException(Exception): - pass + """Exception for the `samconfig` file being not present or in unrecognized format""" + + +class FileParseException(Exception): + """Exception when a file is incorrectly parsed by a FileManager object.""" + + +class SamConfigFileReadException(Exception): + """Exception when a `samconfig` file is read incorrectly.""" diff --git a/samcli/lib/config/file_manager.py b/samcli/lib/config/file_manager.py new file mode 100644 index 0000000000..0629ace318 --- /dev/null +++ b/samcli/lib/config/file_manager.py @@ -0,0 +1,342 @@ +""" +Class to represent the parsing of different file types into Python objects. +""" + + +import json +import logging +from abc import ABC, abstractmethod +from pathlib import Path +from typing import Any, Dict, Type + +import tomlkit +from ruamel.yaml import YAML, YAMLError +from ruamel.yaml.compat import StringIO + +from samcli.lib.config.exceptions import FileParseException + +LOG = logging.getLogger(__name__) +COMMENT_KEY = "__comment__" + + +class FileManager(ABC): + """ + Abstract class to be overridden by file managers for specific file extensions. 
+ """ + + @staticmethod + @abstractmethod + def read(filepath: Path) -> Any: + """ + Read a file at a given path. + + Parameters + ---------- + filepath: Path + The Path object that points to the file to be read. + + Returns + ------- + Any + A dictionary-like representation of the contents at the filepath location. + """ + raise NotImplementedError("Read method not implemented.") + + @staticmethod + @abstractmethod + def write(document: dict, filepath: Path): + """ + Write a dictionary or dictionary-like object to a given file. + + Parameters + ---------- + document: dict + The object to write. + filepath: Path + The final location for the document to be written. + """ + raise NotImplementedError("Write method not implemented.") + + @staticmethod + @abstractmethod + def put_comment(document: Any, comment: str) -> Any: + """ + Put a comment in a document object. + + Parameters + ---------- + document: Any + The object to write + comment: str + The comment to include in the document. + + Returns + ------- + Any + The new document, with the comment added to it. + """ + raise NotImplementedError("Put comment method not implemented.") + + +class TomlFileManager(FileManager): + """ + Static class to read and write toml files. + """ + + file_format = "TOML" + + @staticmethod + def read(filepath: Path) -> Any: + """ + Read a TOML file at the given path. + + Parameters + ---------- + filepath: Path + The Path object that points to the file to be read. + + Returns + ------- + Any + A dictionary-like tomlkit.TOMLDocument object, which represents the contents of the TOML file at the + provided location. + """ + toml_doc = tomlkit.document() + try: + txt = filepath.read_text() + toml_doc = tomlkit.loads(txt) + except OSError as e: + LOG.debug(f"OSError occurred while reading {TomlFileManager.file_format} file: {str(e)}") + except tomlkit.exceptions.TOMLKitError as e: + raise FileParseException(e) from e + + return toml_doc + + @staticmethod + def write(document: dict, filepath: Path): + """ + Write the contents of a dictionary or tomlkit.TOMLDocument to a TOML file at the provided location. + + Parameters + ---------- + document: dict + The object to write. + filepath: Path + The final location for the TOML file to be written. + """ + if not document: + LOG.debug("Nothing for TomlFileManager to write.") + return + + toml_document = TomlFileManager._to_toml(document) + + if toml_document.get(COMMENT_KEY, None): # Remove dunder comments that may be residue from other formats + toml_document.add(tomlkit.comment(toml_document.get(COMMENT_KEY, ""))) + toml_document.pop(COMMENT_KEY) + + filepath.write_text(tomlkit.dumps(toml_document)) + + @staticmethod + def put_comment(document: dict, comment: str) -> Any: + """ + Put a comment in a document object. + + Parameters + ---------- + document: Any + The tomlkit.TOMLDocument object to write + comment: str + The comment to include in the document. + + Returns + ------- + Any + The new TOMLDocument, with the comment added to it. + """ + document = TomlFileManager._to_toml(document) + document.add(tomlkit.comment(comment)) + return document + + @staticmethod + def _to_toml(document: dict) -> tomlkit.TOMLDocument: + """Ensure that a dictionary-like object is a TOMLDocument.""" + return tomlkit.parse(tomlkit.dumps(document)) + + +class YamlFileManager(FileManager): + """ + Static class to read and write yaml files. + """ + + yaml = YAML() + file_format = "YAML" + + @staticmethod + def read(filepath: Path) -> Any: + """ + Read a YAML file at the given path. 
+ + Parameters + ---------- + filepath: Path + The Path object that points to the file to be read. + + Returns + ------- + Any + A dictionary-like yaml object, which represents the contents of the YAML file at the + provided location. + """ + yaml_doc = {} + try: + yaml_doc = YamlFileManager.yaml.load(filepath.read_text()) + except OSError as e: + LOG.debug(f"OSError occurred while reading {YamlFileManager.file_format} file: {str(e)}") + except YAMLError as e: + raise FileParseException(e) from e + + return yaml_doc + + @staticmethod + def write(document: dict, filepath: Path): + """ + Write the contents of a dictionary to a YAML file at the provided location. + + Parameters + ---------- + document: dict + The object to write. + filepath: Path + The final location for the YAML file to be written. + """ + if not document: + LOG.debug("No document given to YamlFileManager to write.") + return + + yaml_doc = YamlFileManager._to_yaml(document) + + if yaml_doc.get(COMMENT_KEY, None): # Comment appears at the top of doc + yaml_doc.yaml_set_start_comment(document[COMMENT_KEY]) + yaml_doc.pop(COMMENT_KEY) + + YamlFileManager.yaml.dump(yaml_doc, filepath) + + @staticmethod + def put_comment(document: Any, comment: str) -> Any: + """ + Put a comment in a document object. + + Parameters + ---------- + document: Any + The yaml object to write + comment: str + The comment to include in the document. + + Returns + ------- + Any + The new yaml document, with the comment added to it. + """ + document = YamlFileManager._to_yaml(document) + document.yaml_set_start_comment(comment) + return document + + @staticmethod + def _to_yaml(document: dict) -> Any: + """ + Ensure a dictionary-like object is a YAML document. + + Parameters + ---------- + document: dict + A dictionary-like object to parse. + + Returns + ------- + Any + A dictionary-like YAML object, as derived from `yaml.load()`. + """ + with StringIO() as stream: + YamlFileManager.yaml.dump(document, stream) + return YamlFileManager.yaml.load(stream.getvalue()) + + +class JsonFileManager(FileManager): + """ + Static class to read and write json files. + """ + + file_format = "JSON" + INDENT_SIZE = 2 + + @staticmethod + def read(filepath: Path) -> Any: + """ + Read a JSON file at a given path. + + Parameters + ---------- + filepath: Path + The Path object that points to the file to be read. + + Returns + ------- + Any + A dictionary representation of the contents of the JSON document. + """ + json_file = {} + try: + json_file = json.loads(filepath.read_text()) + except OSError as e: + LOG.debug(f"OSError occurred while reading {JsonFileManager.file_format} file: {str(e)}") + except json.JSONDecodeError as e: + raise FileParseException(e) from e + return json_file + + @staticmethod + def write(document: dict, filepath: Path): + """ + Write a dictionary or dictionary-like object to a JSON file. + + Parameters + ---------- + document: dict + The JSON object to write. + filepath: Path + The final location for the document to be written. + """ + if not document: + LOG.debug("No document given to JsonFileManager to write.") + return + + with filepath.open("w") as file: + json.dump(document, file, indent=JsonFileManager.INDENT_SIZE) + + @staticmethod + def put_comment(document: Any, comment: str) -> Any: + """ + Put a comment in a JSON object. + + Parameters + ---------- + document: Any + The JSON object to write + comment: str + The comment to include in the document. + + Returns + ------- + Any + The new JSON dictionary object, with the comment added to it. 
+ """ + document.update({COMMENT_KEY: comment}) + return document + + +FILE_MANAGER_MAPPER: Dict[str, Type[FileManager]] = { # keys ordered by priority + ".toml": TomlFileManager, + ".yaml": YamlFileManager, + ".yml": YamlFileManager, + # ".json": JsonFileManager, # JSON support disabled +} diff --git a/samcli/lib/config/samconfig.py b/samcli/lib/config/samconfig.py index e48e53d625..e9acafd557 100644 --- a/samcli/lib/config/samconfig.py +++ b/samcli/lib/config/samconfig.py @@ -7,26 +7,25 @@ from pathlib import Path from typing import Any, Iterable -import tomlkit - -from samcli.lib.config.exceptions import SamConfigVersionException +from samcli.lib.config.exceptions import FileParseException, SamConfigFileReadException, SamConfigVersionException +from samcli.lib.config.file_manager import FILE_MANAGER_MAPPER from samcli.lib.config.version import SAM_CONFIG_VERSION, VERSION_KEY +from samcli.lib.telemetry.event import EventTracker LOG = logging.getLogger(__name__) -DEFAULT_CONFIG_FILE_EXTENSION = "toml" -DEFAULT_CONFIG_FILE_NAME = f"samconfig.{DEFAULT_CONFIG_FILE_EXTENSION}" +DEFAULT_CONFIG_FILE_EXTENSION = ".toml" +DEFAULT_CONFIG_FILE = "samconfig" +DEFAULT_CONFIG_FILE_NAME = DEFAULT_CONFIG_FILE + DEFAULT_CONFIG_FILE_EXTENSION DEFAULT_ENV = "default" DEFAULT_GLOBAL_CMDNAME = "global" class SamConfig: """ - Class to interface with `samconfig.toml` file. + Class to represent `samconfig` config options. """ - document = None - def __init__(self, config_dir, filename=None): """ Initialize the class @@ -39,11 +38,23 @@ def __init__(self, config_dir, filename=None): Optional. Name of the configuration file. It is recommended to stick with default so in the future we could automatically support auto-resolving multiple config files within same directory. """ - self.filepath = Path(config_dir, filename or DEFAULT_CONFIG_FILE_NAME) + self.document = {} + self.filepath = Path(config_dir, filename or self.get_default_file(config_dir=config_dir)) + file_extension = self.filepath.suffix + self.file_manager = FILE_MANAGER_MAPPER.get(file_extension, None) + if not self.file_manager: + LOG.warning( + f"The config file extension '{file_extension}' is not supported. " + f"Supported formats are: [{'|'.join(FILE_MANAGER_MAPPER.keys())}]" + ) + raise SamConfigFileReadException( + f"The config file {self.filepath} uses an unsupported extension, and cannot be read." 
+ ) + self._read() + EventTracker.track_event("SamConfigFileExtension", file_extension) def get_stage_configuration_names(self): - self._read() - if isinstance(self.document, dict): + if self.document: return [stage for stage, value in self.document.items() if isinstance(value, dict)] return [] @@ -69,23 +80,19 @@ def get_all(self, cmd_names, section, env=DEFAULT_ENV): ------ KeyError If the config file does *not* have the specific section - - tomlkit.exceptions.TOMLKitError - If the configuration file is invalid """ env = env or DEFAULT_ENV - self._read() - if isinstance(self.document, dict): - toml_content = self.document.get(env, {}) - params = toml_content.get(self._to_key(cmd_names), {}).get(section, {}) - if DEFAULT_GLOBAL_CMDNAME in toml_content: - global_params = toml_content.get(DEFAULT_GLOBAL_CMDNAME, {}).get(section, {}) - global_params.update(params.copy()) - params = global_params.copy() - return params - return {} + self.document = self._read() + + config_content = self.document.get(env, {}) + params = config_content.get(self._to_key(cmd_names), {}).get(section, {}) + if DEFAULT_GLOBAL_CMDNAME in config_content: + global_params = config_content.get(DEFAULT_GLOBAL_CMDNAME, {}).get(section, {}) + global_params.update(params.copy()) + params = global_params.copy() + return params def put(self, cmd_names, section, key, value, env=DEFAULT_ENV): """ @@ -102,20 +109,10 @@ def put(self, cmd_names, section, key, value, env=DEFAULT_ENV): key : str Key to write the data under value : Any - Value to write. Could be any of the supported TOML types. + Value to write. Could be any of the supported types. env : str Optional, Name of the environment - - Raises - ------ - tomlkit.exceptions.TOMLKitError - If the data is invalid """ - - if self.document is None: - # Checking for None here since a TOMLDocument can include a - # 'body' property but still be falsy without a 'value' property - self._read() # Empty document prepare the initial structure. 
# self.document is a nested dict, we need to check each layer and add new tables, otherwise duplicated key # in parent layer will override the whole child layer @@ -144,20 +141,12 @@ def put_comment(self, comment): comment: str A comment to write to the samconfg file """ - if self.document is None: - self._read() - self.document.add(tomlkit.comment(comment)) + self.document = self.file_manager.put_comment(self.document, comment) def flush(self): """ Write the data back to file - - Raises - ------ - tomlkit.exceptions.TOMLKitError - If the data is invalid - """ self._write() @@ -167,7 +156,7 @@ def sanity_check(self): """ try: self._read() - except tomlkit.exceptions.TOMLKitError: + except SamConfigFileReadException: return False else: return True @@ -196,13 +185,10 @@ def config_dir(template_file_path=None): def _read(self): if not self.document: try: - txt = self.filepath.read_text() - self.document = tomlkit.loads(txt) - self._version_sanity_check(self._version()) - except OSError: - self.document = tomlkit.document() - - if self.document.body: + self.document = self.file_manager.read(self.filepath) + except FileParseException as e: + raise SamConfigFileReadException(e) from e + if self.document: self._version_sanity_check(self._version()) return self.document @@ -213,12 +199,9 @@ def _write(self): self._ensure_exists() current_version = self._version() if self._version() else SAM_CONFIG_VERSION - try: - self.document.add(VERSION_KEY, current_version) - except tomlkit.exceptions.KeyAlreadyPresent: - # NOTE(TheSriram): Do not attempt to re-write an existing version - pass - self.filepath.write_text(tomlkit.dumps(self.document)) + self.document.update({VERSION_KEY: current_version}) + + self.file_manager.write(self.document, self.filepath) def _version(self): return self.document.get(VERSION_KEY, None) @@ -261,6 +244,40 @@ def _deduplicate_global_parameters(self, cmd_name_key, section, key, env=DEFAULT # Only keep the global parameter del self.document[env][cmd_name_key][section][key] + @staticmethod + def get_default_file(config_dir: str) -> str: + """Return a defaultly-named config file, if it exists, otherwise the current default. + + Parameters + ---------- + config_dir: str + The name of the directory where the config file is/will be stored. + + Returns + ------- + str + The name of the config file found, if it exists. In the case that it does not exist, the default config + file name is returned instead. + """ + config_files_found = 0 + config_file = DEFAULT_CONFIG_FILE_NAME + + for extension in reversed(list(FILE_MANAGER_MAPPER.keys())): + filename = DEFAULT_CONFIG_FILE + extension + if Path(config_dir, filename).exists(): + config_files_found += 1 + config_file = filename + + if config_files_found == 0: # Config file doesn't exist (yet!) + LOG.debug("No config file found in this directory.") + elif config_files_found > 1: # Multiple config files; let user know which is used + LOG.info( + f"More than one samconfig file found; using {config_file}." + f" To use another config file, please specify it using the '--config-file' flag." 
+ ) + + return config_file + @staticmethod def _version_sanity_check(version: Any) -> None: if not isinstance(version, float): diff --git a/samcli/lib/telemetry/event.py b/samcli/lib/telemetry/event.py index 2d819e37bf..2c35748951 100644 --- a/samcli/lib/telemetry/event.py +++ b/samcli/lib/telemetry/event.py @@ -11,6 +11,7 @@ from samcli.cli.context import Context from samcli.lib.build.workflows import ALL_CONFIGS +from samcli.lib.config.file_manager import FILE_MANAGER_MAPPER from samcli.lib.telemetry.telemetry import Telemetry from samcli.local.common.runtime_template import INIT_RUNTIMES @@ -26,6 +27,7 @@ class EventName(Enum): SYNC_FLOW_START = "SyncFlowStart" SYNC_FLOW_END = "SyncFlowEnd" BUILD_WORKFLOW_USED = "BuildWorkflowUsed" + CONFIG_FILE_EXTENSION = "SamConfigFileExtension" class UsedFeature(Enum): @@ -69,6 +71,7 @@ class EventType: EventName.SYNC_FLOW_START: _SYNC_FLOWS, EventName.SYNC_FLOW_END: _SYNC_FLOWS, EventName.BUILD_WORKFLOW_USED: _WORKFLOWS, + EventName.CONFIG_FILE_EXTENSION: list(FILE_MANAGER_MAPPER.keys()), } @staticmethod diff --git a/tests/integration/buildcmd/build_integ_base.py b/tests/integration/buildcmd/build_integ_base.py index 13ba14f310..2dab556843 100644 --- a/tests/integration/buildcmd/build_integ_base.py +++ b/tests/integration/buildcmd/build_integ_base.py @@ -83,6 +83,7 @@ def get_command_list( beta_features=None, build_in_source=None, mount_with=None, + config_file=None, ): command_list = [self.cmd, "build"] @@ -146,6 +147,9 @@ def get_command_list( if build_in_source is not None: command_list += ["--build-in-source"] if build_in_source else ["--no-build-in-source"] + if config_file is not None: + command_list += ["--config-file", config_file] + return command_list def verify_docker_container_cleanedup(self, runtime): diff --git a/tests/integration/buildcmd/test_build_samconfig.py b/tests/integration/buildcmd/test_build_samconfig.py new file mode 100644 index 0000000000..3df5052599 --- /dev/null +++ b/tests/integration/buildcmd/test_build_samconfig.py @@ -0,0 +1,114 @@ +import os +from pathlib import Path +from parameterized import parameterized, parameterized_class + +from tests.integration.buildcmd.build_integ_base import BuildIntegBase +from tests.testing_utils import run_command + + +configs = { + ".toml": "samconfig/samconfig.toml", + ".yaml": "samconfig/samconfig.yaml", + ".yml": "samconfig/samconfig.yml", + ".json": "samconfig/samconfig.json", +} + + +class TestSamConfigWithBuild(BuildIntegBase): + @parameterized.expand( + [ + (".toml"), + (".yaml"), + # (".json"), + ] + ) + def test_samconfig_works_with_extension(self, extension): + cmdlist = self.get_command_list(config_file=configs[extension]) + + command_result = run_command(cmdlist, cwd=self.working_dir) + stdout = str(command_result[1]) + stderr = str(command_result[2]) + + self.assertEqual(command_result.process.returncode, 0, "Build should succeed") + self.assertIn( + f"Built Artifacts : {extension}", + stdout, + f"Build template should use build_dir from samconfig{extension}", + ) + self.assertIn("Starting Build use cache", stderr, f"'cache'=true should be set in samconfig{extension}") + + @parameterized.expand( + [ + (".toml"), + (".yaml"), + # (".json"), + ] + ) + def test_samconfig_parameters_are_overridden(self, extension): + overrides = {"Runtime": "python3.8"} + overridden_build_dir = f"override_{extension}" + + cmdlist = self.get_command_list( + config_file=configs[extension], parameter_overrides=overrides, build_dir=overridden_build_dir + ) + + command_result = 
run_command(cmdlist, cwd=self.working_dir) + stdout = str(command_result[1]) + stderr = str(command_result[2]) + + self.assertEqual(command_result.process.returncode, 0, "Build should succeed") + self.assertNotIn( + f"Built Artifacts : {extension}", + stdout, + f"Build template should not use build_dir from samconfig{extension}", + ) + self.assertIn( + f"Built Artifacts : {overridden_build_dir}", stdout, f"Build template should use overridden build_dir" + ) + self.assertIn("Starting Build use cache", stderr, f"'cache'=true should be set in samconfig{extension}") + self.assertNotIn("python3.9", stderr, f"parameter_overrides runtime should not read from samconfig{extension}") + self.assertIn(overrides["Runtime"], stderr, "parameter_overrides should use overridden runtime") + self.assertNotIn("SomeURI", stderr, f"parameter_overrides should not read ANY values from samconfig{extension}") + + +@parameterized_class( + [ # Ordered by expected priority + {"extensions": [".toml", ".yaml", ".yml"]}, + {"extensions": [".yaml", ".yml"]}, + ] +) +class TestSamConfigExtensionHierarchy(BuildIntegBase): + def setUp(self): + super().setUp() + new_template_location = Path(self.working_dir, "template.yaml") + new_template_location.write_text(Path(self.template_path).read_text()) + for extension in self.extensions: + config_contents = Path(self.test_data_path, configs[extension]).read_text() + new_path = Path(self.working_dir, f"samconfig{extension}") + new_path.write_text(config_contents) + self.assertTrue(new_path.exists(), f"File samconfig{extension} should have been created in cwd") + + def tearDown(self): + for extension in self.extensions: + config_path = Path(self.working_dir, f"samconfig{extension}") + os.remove(config_path) + super().tearDown() + + def test_samconfig_pulls_correct_file_if_multiple(self): + self.template_path = str(Path(self.working_dir, "template.yaml")) + cmdlist = self.get_command_list(debug=True) + command_result = run_command(cmdlist, cwd=self.working_dir) + stdout = str(command_result[1]) + + self.assertEqual(command_result.process.returncode, 0, "Build should succeed") + self.assertIn( + f" {self.extensions[0]}", + stdout, + f"samconfig{self.extensions[0]} should take priority in current test group", + ) + for other_extension in self.extensions[1:]: + self.assertNotIn( + f" {other_extension}", + stdout, + f"samconfig{other_extension} should not be read over another, higher priority extension", + ) diff --git a/tests/integration/deploy/deploy_integ_base.py b/tests/integration/deploy/deploy_integ_base.py index f48eae9d61..4db4e5ff6a 100644 --- a/tests/integration/deploy/deploy_integ_base.py +++ b/tests/integration/deploy/deploy_integ_base.py @@ -2,11 +2,13 @@ import tempfile from pathlib import Path from enum import Enum, auto +from typing import List, Optional import boto3 from botocore.config import Config from samcli.lib.bootstrap.bootstrap import SAM_CLI_STACK_NAME +from samcli.lib.config.samconfig import SamConfig from tests.integration.package.package_integ_base import PackageIntegBase from tests.testing_utils import get_sam_command, run_command, run_command_with_input @@ -212,3 +214,26 @@ def get_minimal_build_command_list(template_file=None, build_dir=None): command_list = command_list + ["--build-dir", str(build_dir)] return command_list + + def _assert_deploy_samconfig_parameters( + self, + config: SamConfig, + stack_name: str = SAM_CLI_STACK_NAME, + resolve_s3: bool = True, + region: str = "us-east-1", + capabilities: str = "CAPABILITY_IAM", + confirm_changeset: 
Optional[bool] = None, + parameter_overrides: Optional[str] = None, + ): + params = config.document["default"]["deploy"]["parameters"] + + self.assertEqual(params["stack_name"], stack_name) + self.assertEqual(params["resolve_s3"], resolve_s3) + self.assertEqual(params["region"], region) + self.assertEqual(params["capabilities"], capabilities) + + if confirm_changeset is not None: + self.assertEqual(params["confirm_changeset"], confirm_changeset) + + if parameter_overrides is not None: + self.assertEqual(params["parameter_overrides"], parameter_overrides) diff --git a/tests/integration/deploy/test_deploy_command.py b/tests/integration/deploy/test_deploy_command.py index 131d3969ab..a7ece04a5c 100644 --- a/tests/integration/deploy/test_deploy_command.py +++ b/tests/integration/deploy/test_deploy_command.py @@ -10,7 +10,7 @@ from parameterized import parameterized from samcli.lib.bootstrap.bootstrap import SAM_CLI_STACK_NAME -from samcli.lib.config.samconfig import DEFAULT_CONFIG_FILE_NAME +from samcli.lib.config.samconfig import DEFAULT_CONFIG_FILE_NAME, SamConfig from tests.integration.deploy.deploy_integ_base import DeployIntegBase from tests.testing_utils import RUNNING_ON_CI, RUNNING_TEST_FOR_MASTER_ON_CI, RUN_BY_CANARY, UpdatableSARTemplate @@ -613,6 +613,13 @@ def test_deploy_guided_zip(self, template_file): # Deploy should succeed with a managed stack self.assertEqual(deploy_process_execute.process.returncode, 0) self.stacks.append({"name": SAM_CLI_STACK_NAME}) + # Verify the contents in samconfig + config = SamConfig(self.test_data_path) + deploy_config_params = config.document["default"]["deploy"]["parameters"] + self.assertEqual(deploy_config_params["stack_name"], stack_name) + self.assertTrue(deploy_config_params["resolve_s3"]) + self.assertEqual(deploy_config_params["region"], "us-east-1") + self.assertEqual(deploy_config_params["capabilities"], "CAPABILITY_IAM") # Remove samconfig.toml os.remove(self.test_data_path.joinpath(DEFAULT_CONFIG_FILE_NAME)) @@ -627,7 +634,7 @@ def test_deploy_guided_image_auto(self, template_file): deploy_command_list = self.get_deploy_command_list(template_file=template_path, guided=True) deploy_process_execute = self.run_command_with_input( - deploy_command_list, f"{stack_name}\n\n\n\n\ny\n\n\ny\n\n\n\n".encode() + deploy_command_list, f"{stack_name}\n\n\n\n\ny\n\n\n\n\n\n\n".encode() ) # Deploy should succeed with a managed stack @@ -638,6 +645,10 @@ def test_deploy_guided_image_auto(self, template_file): self._assert_companion_stack(self.cfn_client, companion_stack_name) self._assert_companion_stack_content(self.ecr_client, companion_stack_name) + # Verify the contents in samconfig + config = SamConfig(self.test_data_path) + self._assert_deploy_samconfig_parameters(config, stack_name=stack_name) + # Remove samconfig.toml os.remove(self.test_data_path.joinpath(DEFAULT_CONFIG_FILE_NAME)) @@ -669,6 +680,9 @@ def test_deploy_guided_image_specify(self, template_file, does_ask_for_authoriza self.fail("Companion stack was created. 
This should not happen with specifying image repos.") self.stacks.append({"name": SAM_CLI_STACK_NAME}) + # Verify the contents in samconfig + config = SamConfig(self.test_data_path) + self._assert_deploy_samconfig_parameters(config, stack_name=stack_name) # Remove samconfig.toml os.remove(self.test_data_path.joinpath(DEFAULT_CONFIG_FILE_NAME)) @@ -690,6 +704,11 @@ def test_deploy_guided_set_parameter(self, template_file): # Deploy should succeed with a managed stack self.assertEqual(deploy_process_execute.process.returncode, 0) self.stacks.append({"name": SAM_CLI_STACK_NAME}) + # Verify the contents in samconfig + config = SamConfig(self.test_data_path) + self._assert_deploy_samconfig_parameters( + config, stack_name=stack_name, parameter_overrides='Parameter="SuppliedParameter"' + ) # Remove samconfig.toml os.remove(self.test_data_path.joinpath(DEFAULT_CONFIG_FILE_NAME)) @@ -710,6 +729,14 @@ def test_deploy_guided_set_capabilities(self, template_file): # Deploy should succeed with a managed stack self.assertEqual(deploy_process_execute.process.returncode, 0) self.stacks.append({"name": SAM_CLI_STACK_NAME}) + # Verify the contents in samconfig + config = SamConfig(self.test_data_path) + self._assert_deploy_samconfig_parameters( + config, + stack_name=stack_name, + capabilities="CAPABILITY_IAM CAPABILITY_NAMED_IAM", + parameter_overrides='Parameter="SuppliedParameter"', + ) # Remove samconfig.toml os.remove(self.test_data_path.joinpath(DEFAULT_CONFIG_FILE_NAME)) @@ -731,6 +758,11 @@ def test_deploy_guided_capabilities_default(self, template_file): # Deploy should succeed with a managed stack self.assertEqual(deploy_process_execute.process.returncode, 0) self.stacks.append({"name": SAM_CLI_STACK_NAME}) + # Verify the contents in samconfig + config = SamConfig(self.test_data_path) + self._assert_deploy_samconfig_parameters( + config, stack_name=stack_name, parameter_overrides='Parameter="SuppliedParameter"' + ) # Remove samconfig.toml os.remove(self.test_data_path.joinpath(DEFAULT_CONFIG_FILE_NAME)) @@ -752,6 +784,11 @@ def test_deploy_guided_set_confirm_changeset(self, template_file): # Deploy should succeed with a managed stack self.assertEqual(deploy_process_execute.process.returncode, 0) self.stacks.append({"name": SAM_CLI_STACK_NAME}) + # Verify the contents in samconfig + config = SamConfig(self.test_data_path) + self._assert_deploy_samconfig_parameters( + config, stack_name=stack_name, confirm_changeset=True, parameter_overrides='Parameter="SuppliedParameter"' + ) # Remove samconfig.toml os.remove(self.test_data_path.joinpath(DEFAULT_CONFIG_FILE_NAME)) @@ -789,7 +826,7 @@ def test_deploy_with_invalid_config(self, template_file, config_file): deploy_process_execute = self.run_command(deploy_command_list) self.assertEqual(deploy_process_execute.process.returncode, 1) - self.assertIn("Error reading configuration: Unexpected character", str(deploy_process_execute.stderr)) + self.assertIn("SamConfigFileReadException: Unexpected character", str(deploy_process_execute.stderr)) @parameterized.expand([("aws-serverless-function.yaml", "samconfig-tags-list.toml")]) def test_deploy_with_valid_config_tags_list(self, template_file, config_file): diff --git a/tests/integration/telemetry/test_experimental_metric.py b/tests/integration/telemetry/test_experimental_metric.py index 977e65053f..702ceb4a5f 100644 --- a/tests/integration/telemetry/test_experimental_metric.py +++ b/tests/integration/telemetry/test_experimental_metric.py @@ -211,8 +211,11 @@ def 
test_must_send_not_experimental_metrics_if_not_experimental(self): self.assertEqual(process.returncode, 2, "Command should fail") all_requests = server.get_all_requests() - self.assertEqual(1, len(all_requests), "Command run metric must be sent") - request = all_requests[0] + self.assertEqual(2, len(all_requests), "Command run and event metrics must be sent") + # NOTE: Since requests happen asynchronously, we cannot guarantee whether the + # commandRun metric will be first or second, so we sort for consistency. + all_requests.sort(key=lambda x: list(x["data"]["metrics"][0].keys())[0]) + request = all_requests[0] # "commandRun" comes before "events" self.assertIn("Content-Type", request["headers"]) self.assertEqual(request["headers"]["Content-Type"], "application/json") diff --git a/tests/integration/telemetry/test_installed_metric.py b/tests/integration/telemetry/test_installed_metric.py index e17459828c..725a2d0ff5 100644 --- a/tests/integration/telemetry/test_installed_metric.py +++ b/tests/integration/telemetry/test_installed_metric.py @@ -24,7 +24,9 @@ def test_send_installed_metric_on_first_run(self): self.assertIn(EXPECTED_TELEMETRY_PROMPT, stderrdata.decode()) all_requests = server.get_all_requests() - self.assertEqual(2, len(all_requests), "There should be exactly two metrics request") + self.assertEqual( + 3, len(all_requests), "There should be exactly three metrics request" + ) # 3 = 2 expected + events # First one is usually the installed metric requests = filter_installed_metric_requests(all_requests) diff --git a/tests/integration/telemetry/test_telemetry_contract.py b/tests/integration/telemetry/test_telemetry_contract.py index 08b3585b99..a3e383bb5d 100644 --- a/tests/integration/telemetry/test_telemetry_contract.py +++ b/tests/integration/telemetry/test_telemetry_contract.py @@ -28,7 +28,9 @@ def test_must_not_send_metrics_if_disabled_using_envvar(self): self.assertEqual(process.returncode, 0, "Command should successfully complete") all_requests = server.get_all_requests() - self.assertEqual(1, len(all_requests), "Command run metric should be sent") + self.assertEqual( + 2, len(all_requests), "Command run and event metrics should be sent" + ) # 2 = cmd_run + events def test_must_send_metrics_if_enabled_via_envvar(self): """ @@ -52,7 +54,7 @@ def test_must_send_metrics_if_enabled_via_envvar(self): self.assertEqual(process.returncode, 0, "Command should successfully complete") all_requests = server.get_all_requests() - self.assertEqual(1, len(all_requests), "Command run metric must be sent") + self.assertEqual(2, len(all_requests), "Command run and event metrics must be sent") # cmd_run + events def test_must_not_crash_when_offline(self): """ diff --git a/tests/integration/testdata/buildcmd/samconfig/samconfig.json b/tests/integration/testdata/buildcmd/samconfig/samconfig.json new file mode 100644 index 0000000000..6fa18e1c11 --- /dev/null +++ b/tests/integration/testdata/buildcmd/samconfig/samconfig.json @@ -0,0 +1,12 @@ +{ + "version": 0.1, + "default": { + "build": { + "parameters": { + "build_dir": ".json", + "cached": true, + "parameter_overrides": "Runtime=python3.9 CodeUri=SomeURI Handler=SomeHandler" + } + } + } +} \ No newline at end of file diff --git a/tests/integration/testdata/buildcmd/samconfig/samconfig.toml b/tests/integration/testdata/buildcmd/samconfig/samconfig.toml new file mode 100644 index 0000000000..23a769ff5e --- /dev/null +++ b/tests/integration/testdata/buildcmd/samconfig/samconfig.toml @@ -0,0 +1,5 @@ +version = 0.1 +[default.build.parameters] 
+build_dir = ".toml" +cached = true +parameter_overrides = "Runtime=python3.9 CodeUri=SomeURI Handler=SomeHandler" \ No newline at end of file diff --git a/tests/integration/testdata/buildcmd/samconfig/samconfig.yaml b/tests/integration/testdata/buildcmd/samconfig/samconfig.yaml new file mode 100644 index 0000000000..63af206238 --- /dev/null +++ b/tests/integration/testdata/buildcmd/samconfig/samconfig.yaml @@ -0,0 +1,7 @@ +version: 0.1 +default: + build: + parameters: + build_dir: .yaml + cached: true + parameter_overrides: Runtime=python3.9 CodeUri=SomeURI Handler=SomeHandler \ No newline at end of file diff --git a/tests/integration/testdata/buildcmd/samconfig/samconfig.yml b/tests/integration/testdata/buildcmd/samconfig/samconfig.yml new file mode 100644 index 0000000000..4af8baa434 --- /dev/null +++ b/tests/integration/testdata/buildcmd/samconfig/samconfig.yml @@ -0,0 +1,7 @@ +version: 0.1 +default: + build: + parameters: + build_dir: .yml + cached: true + parameter_overrides: Runtime=python3.9 CodeUri=SomeURI Handler=SomeHandler \ No newline at end of file diff --git a/tests/integration/testdata/buildcmd/samconfig/template.yaml b/tests/integration/testdata/buildcmd/samconfig/template.yaml new file mode 100644 index 0000000000..6944799912 --- /dev/null +++ b/tests/integration/testdata/buildcmd/samconfig/template.yaml @@ -0,0 +1,37 @@ +AWSTemplateFormatVersion : '2010-09-09' +Transform: AWS::Serverless-2016-10-31 + +Parameters: + Runtime: + Type: String + CodeUri: + Type: String + Handler: + Type: String + +Resources: + + Function: + Type: AWS::Serverless::Function + Properties: + Handler: !Ref Handler + Runtime: !Ref Runtime + CodeUri: !Ref CodeUri + Timeout: 600 + + + OtherRelativePathResource: + Type: AWS::ApiGateway::RestApi + Properties: + BodyS3Location: SomeRelativePath + + GlueResource: + Type: AWS::Glue::Job + Properties: + Command: + ScriptLocation: SomeRelativePath + + ExampleNestedStack: + Type: AWS::CloudFormation::Stack + Properties: + TemplateURL: https://s3.amazonaws.com/examplebucket/exampletemplate.yml diff --git a/tests/unit/cli/test_cli_config_file.py b/tests/unit/cli/test_cli_config_file.py index 606e0a004e..19bfcfa011 100644 --- a/tests/unit/cli/test_cli_config_file.py +++ b/tests/unit/cli/test_cli_config_file.py @@ -5,8 +5,11 @@ from unittest import TestCase, skipIf from unittest.mock import MagicMock, patch +import tomlkit + from samcli.commands.exceptions import ConfigException -from samcli.cli.cli_config_file import TomlProvider, configuration_option, configuration_callback, get_ctx_defaults +from samcli.cli.cli_config_file import ConfigProvider, configuration_option, configuration_callback, get_ctx_defaults +from samcli.lib.config.exceptions import SamConfigFileReadException, SamConfigVersionException from samcli.lib.config.samconfig import DEFAULT_ENV from tests.testing_utils import IS_WINDOWS @@ -21,9 +24,9 @@ def __init__(self, info_name, parent, params=None, command=None, default_map=Non self.default_map = default_map -class TestTomlProvider(TestCase): +class TestConfigProvider(TestCase): def setUp(self): - self.toml_provider = TomlProvider() + self.config_provider = ConfigProvider() self.config_env = "config_env" self.parameters = "parameters" self.cmd_name = "topic" @@ -33,29 +36,30 @@ def test_toml_valid_with_section(self): config_path = Path(config_dir, "samconfig.toml") config_path.write_text("version=0.1\n[config_env.topic.parameters]\nword='clarity'\n") self.assertEqual( - TomlProvider(section=self.parameters)(config_path, self.config_env, 
[self.cmd_name]), {"word": "clarity"} + ConfigProvider(section=self.parameters)(config_path, self.config_env, [self.cmd_name]), {"word": "clarity"} ) def test_toml_valid_with_no_version(self): config_dir = tempfile.gettempdir() config_path = Path(config_dir, "samconfig.toml") config_path.write_text("[config_env.topic.parameters]\nword='clarity'\n") - with self.assertRaises(ConfigException): - TomlProvider(section=self.parameters)(config_path, self.config_env, [self.cmd_name]) + with self.assertRaises(SamConfigVersionException): + ConfigProvider(section=self.parameters)(config_path, self.config_env, [self.cmd_name]) def test_toml_valid_with_invalid_version(self): config_dir = tempfile.gettempdir() config_path = Path(config_dir, "samconfig.toml") config_path.write_text("version='abc'\n[config_env.topic.parameters]\nword='clarity'\n") - with self.assertRaises(ConfigException): - TomlProvider(section=self.parameters)(config_path, self.config_env, [self.cmd_name]) + with self.assertRaises(SamConfigVersionException): + ConfigProvider(section=self.parameters)(config_path, self.config_env, [self.cmd_name]) def test_toml_invalid_empty_dict(self): config_dir = tempfile.gettempdir() config_path = Path(config_dir, "samconfig.toml") config_path.write_text("[topic]\nword=clarity\n") - self.assertEqual(self.toml_provider(config_dir, self.config_env, [self.cmd_name]), {}) + with self.assertRaises(SamConfigFileReadException): + self.config_provider(config_path, self.config_env, [self.cmd_name]) def test_toml_invalid_file_name(self): config_dir = tempfile.gettempdir() @@ -63,16 +67,16 @@ def test_toml_invalid_file_name(self): config_path.write_text("version=0.1\n[config_env.topic.parameters]\nword='clarity'\n") config_path_invalid = Path(config_dir, "samconfig.toml") - with self.assertRaises(ConfigException): - self.toml_provider(config_path_invalid, self.config_env, [self.cmd_name]) + with self.assertRaises(SamConfigFileReadException): + self.config_provider(config_path_invalid, self.config_env, [self.cmd_name]) def test_toml_invalid_syntax(self): config_dir = tempfile.gettempdir() config_path = Path(config_dir, "samconfig.toml") config_path.write_text("version=0.1\n[config_env.topic.parameters]\nword=_clarity'\n") - with self.assertRaises(ConfigException): - self.toml_provider(config_path, self.config_env, [self.cmd_name]) + with self.assertRaises(SamConfigFileReadException): + self.config_provider(config_path, self.config_env, [self.cmd_name]) class TestCliConfiguration(TestCase): @@ -121,6 +125,7 @@ def test_callback_with_invalid_config_file(self): self.ctx.parent = mock_context3 self.ctx.info_name = "test_info" self.ctx.params = {"config_file": "invalid_config_file"} + self.ctx._parameter_source.__get__ = "COMMANDLINE" setattr(self.ctx, "samconfig_dir", None) with self.assertRaises(ConfigException): configuration_callback( @@ -197,8 +202,8 @@ def test_callback_with_config_file_from_pipe(self): self.assertNotIn(self.value, self.saved_callback.call_args[0]) def test_configuration_option(self): - toml_provider = TomlProvider() - click_option = configuration_option(provider=toml_provider) + config_provider = ConfigProvider() + click_option = configuration_option(provider=config_provider) clc = click_option(self.Dummy()) self.assertEqual(clc.__click_params__[0].is_eager, True) self.assertEqual( @@ -207,7 +212,7 @@ def test_configuration_option(self): ) self.assertEqual(clc.__click_params__[0].hidden, True) self.assertEqual(clc.__click_params__[0].expose_value, False) - 
self.assertEqual(clc.__click_params__[0].callback.args, (None, None, None, toml_provider)) + self.assertEqual(clc.__click_params__[0].callback.args, (None, None, None, config_provider)) def test_get_ctx_defaults_non_nested(self): provider = MagicMock() diff --git a/tests/unit/commands/delete/test_delete_context.py b/tests/unit/commands/delete/test_delete_context.py index daa72c187b..92e2aa2c6a 100644 --- a/tests/unit/commands/delete/test_delete_context.py +++ b/tests/unit/commands/delete/test_delete_context.py @@ -7,7 +7,7 @@ from samcli.commands.delete.delete_context import DeleteContext from samcli.lib.package.artifact_exporter import Template -from samcli.cli.cli_config_file import TomlProvider +from samcli.cli.cli_config_file import ConfigProvider from samcli.lib.delete.cfn_utils import CfnUtils from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.package.ecr_uploader import ECRUploader @@ -58,7 +58,7 @@ def test_delete_context_enter(self, get_boto_client_provider_mock): self.assertEqual(delete_context.init_clients.call_count, 1) @patch.object( - TomlProvider, + ConfigProvider, "__call__", MagicMock( return_value=( @@ -123,7 +123,7 @@ def test_delete_no_user_input( self.assertEqual(expected_prompt_calls, patched_prompt.call_args_list) @patch.object( - TomlProvider, + ConfigProvider, "__call__", MagicMock( return_value=( @@ -506,7 +506,7 @@ def test_s3_option_flag(self): self.assertEqual(delete_context.s3_prefix, "s3_prefix") @patch.object( - TomlProvider, + ConfigProvider, "__call__", MagicMock( return_value=( diff --git a/tests/unit/commands/samconfig/test_samconfig.py b/tests/unit/commands/samconfig/test_samconfig.py index 675f22a4bf..b2e0822c78 100644 --- a/tests/unit/commands/samconfig/test_samconfig.py +++ b/tests/unit/commands/samconfig/test_samconfig.py @@ -8,7 +8,6 @@ import tempfile from pathlib import Path from contextlib import contextmanager -from samcli.lib.config.samconfig import SamConfig, DEFAULT_ENV from click.testing import CliRunner @@ -16,6 +15,7 @@ from unittest.mock import patch, ANY import logging +from samcli.lib.config.samconfig import SamConfig, DEFAULT_ENV from samcli.lib.utils.packagetype import ZIP, IMAGE LOG = logging.getLogger() diff --git a/tests/unit/lib/samconfig/test_file_manager.py b/tests/unit/lib/samconfig/test_file_manager.py new file mode 100644 index 0000000000..18df66474c --- /dev/null +++ b/tests/unit/lib/samconfig/test_file_manager.py @@ -0,0 +1,277 @@ +import json +from pathlib import Path +import tempfile +from unittest import TestCase, skip + +import tomlkit +from ruamel.yaml import YAML + +from samcli.lib.config.exceptions import FileParseException +from samcli.lib.config.file_manager import COMMENT_KEY, JsonFileManager, TomlFileManager, YamlFileManager + + +class TestTomlFileManager(TestCase): + def test_read_toml(self): + config_dir = tempfile.gettempdir() + config_path = Path(config_dir, "samconfig.toml") + config_path.write_text( + "version=0.1\n[config_env.topic1.parameters]\nword='clarity'\nmultiword=['thing 1', 'thing 2']" + ) + config_doc = TomlFileManager.read(config_path) + self.assertEqual( + config_doc, + { + "version": 0.1, + "config_env": {"topic1": {"parameters": {"word": "clarity", "multiword": ["thing 1", "thing 2"]}}}, + }, + ) + + def test_read_toml_invalid_toml(self): + config_dir = tempfile.gettempdir() + config_path = Path(config_dir, "samconfig.toml") + config_path.write_text("fake='not real'\nimproper toml file\n") + with self.assertRaises(FileParseException): + TomlFileManager.read(config_path) + 
+ def test_read_toml_file_path_not_valid(self): + config_dir = "path/that/doesnt/exist" + config_path = Path(config_dir, "samconfig.toml") + config_doc = TomlFileManager.read(config_path) + self.assertEqual(config_doc, tomlkit.document()) + + def test_write_toml(self): + config_dir = tempfile.gettempdir() + config_path = Path(config_dir, "samconfig.toml") + toml = { + "version": 0.1, + "config_env": {"topic2": {"parameters": {"word": "clarity"}}}, + COMMENT_KEY: "This is a comment", + } + + TomlFileManager.write(toml, config_path) + + txt = config_path.read_text() + self.assertIn("version = 0.1", txt) + self.assertIn("[config_env.topic2.parameters]", txt) + self.assertIn('word = "clarity"', txt) + self.assertIn("# This is a comment", txt) + self.assertNotIn(COMMENT_KEY, txt) + + def test_dont_write_toml_if_empty(self): + config_dir = tempfile.gettempdir() + config_path = Path(config_dir, "samconfig.toml") + config_path.write_text("nothing to see here\n") + toml = {} + + TomlFileManager.write(toml, config_path) + + self.assertEqual(config_path.read_text(), "nothing to see here\n") + + def test_write_toml_bad_path(self): + config_path = Path("path/to/some", "file_that_doesnt_exist.toml") + with self.assertRaises(FileNotFoundError): + TomlFileManager.write({"key": "some value"}, config_path) + + def test_write_toml_file(self): + config_dir = tempfile.gettempdir() + config_path = Path(config_dir, "samconfig.toml") + toml = tomlkit.parse('# This is a comment\nversion = 0.1\n[config_env.topic2.parameters]\nword = "clarity"\n') + + TomlFileManager.write(toml, config_path) + + txt = config_path.read_text() + self.assertIn("version = 0.1", txt) + self.assertIn("[config_env.topic2.parameters]", txt) + self.assertIn('word = "clarity"', txt) + self.assertIn("# This is a comment", txt) + + def test_dont_write_toml_file_if_empty(self): + config_dir = tempfile.gettempdir() + config_path = Path(config_dir, "samconfig.toml") + config_path.write_text("nothing to see here\n") + toml = tomlkit.document() + + TomlFileManager.write(toml, config_path) + + self.assertEqual(config_path.read_text(), "nothing to see here\n") + + def test_write_toml_file_bad_path(self): + config_path = Path("path/to/some", "file_that_doesnt_exist.toml") + with self.assertRaises(FileNotFoundError): + TomlFileManager.write(tomlkit.parse('key = "some value"'), config_path) + + def test_toml_put_comment(self): + toml_doc = tomlkit.loads('version = 0.1\n[config_env.topic2.parameters]\nword = "clarity"\n') + + toml_doc = TomlFileManager.put_comment(toml_doc, "This is a comment") + + txt = tomlkit.dumps(toml_doc) + self.assertIn("# This is a comment", txt) + + +class TestYamlFileManager(TestCase): + + yaml = YAML() + + def test_read_yaml(self): + config_dir = tempfile.gettempdir() + config_path = Path(config_dir, "samconfig.yaml") + config_path.write_text( + "version: 0.1\nconfig_env:\n topic1:\n parameters:\n word: clarity\n multiword: [thing 1, thing 2]" + ) + + config_doc = YamlFileManager.read(config_path) + + self.assertEqual( + config_doc, + { + "version": 0.1, + "config_env": {"topic1": {"parameters": {"word": "clarity", "multiword": ["thing 1", "thing 2"]}}}, + }, + ) + + def test_read_yaml_invalid_yaml(self): + config_dir = tempfile.gettempdir() + config_path = Path(config_dir, "samconfig.yaml") + config_path.write_text("fake: not real\nthisYaml isn't correct") + + with self.assertRaises(FileParseException): + YamlFileManager.read(config_path) + + def test_read_yaml_file_path_not_valid(self): + config_dir = "path/that/doesnt/exist" 
+ config_path = Path(config_dir, "samconfig.yaml") + + config_doc = YamlFileManager.read(config_path) + + self.assertEqual(config_doc, {}) + + def test_write_yaml(self): + config_dir = tempfile.gettempdir() + config_path = Path(config_dir, "samconfig.yaml") + yaml = { + "version": 0.1, + "config_env": {"topic2": {"parameters": {"word": "clarity"}}}, + COMMENT_KEY: "This is a comment", + } + + YamlFileManager.write(yaml, config_path) + + txt = config_path.read_text() + self.assertIn("version: 0.1", txt) + self.assertIn("config_env:\n topic2:\n parameters:\n", txt) + self.assertIn("word: clarity", txt) + self.assertIn("# This is a comment", txt) + self.assertNotIn(COMMENT_KEY, txt) + + def test_dont_write_yaml_if_empty(self): + config_dir = tempfile.gettempdir() + config_path = Path(config_dir, "samconfig.yaml") + config_path.write_text("nothing to see here\n") + yaml = {} + + YamlFileManager.write(yaml, config_path) + + self.assertEqual(config_path.read_text(), "nothing to see here\n") + + def test_write_yaml_file_bad_path(self): + config_path = Path("path/to/some", "file_that_doesnt_exist.yaml") + + with self.assertRaises(FileNotFoundError): + YamlFileManager.write(self.yaml.load("key: some value"), config_path) + + def test_yaml_put_comment(self): + config_dir = tempfile.gettempdir() + config_path = Path(config_dir, "samconfig.yaml") + yaml_doc = self.yaml.load("version: 0.1\nconfig_env:\n topic2:\n parameters:\n word: clarity\n") + + yaml_doc = YamlFileManager.put_comment(yaml_doc, "This is a comment") + + self.yaml.dump(yaml_doc, config_path) + txt = config_path.read_text() + self.assertIn("# This is a comment", txt) + + +@skip("JSON config support disabled") +class TestJsonFileManager(TestCase): + def test_read_json(self): + config_dir = tempfile.gettempdir() + config_path = Path(config_dir, "samconfig.json") + config_path.write_text( + json.dumps( + { + "version": 0.1, + "config_env": {"topic1": {"parameters": {"word": "clarity", "multiword": ["thing 1", "thing 2"]}}}, + }, + indent=JsonFileManager.INDENT_SIZE, + ) + ) + + config_doc = JsonFileManager.read(config_path) + + self.assertEqual( + config_doc, + { + "version": 0.1, + "config_env": {"topic1": {"parameters": {"word": "clarity", "multiword": ["thing 1", "thing 2"]}}}, + }, + ) + + def test_read_json_invalid_json(self): + config_dir = tempfile.gettempdir() + config_path = Path(config_dir, "samconfig.json") + config_path.write_text("{\n" + ' "bad_file": "very bad"\n' + ' "improperly": "formatted"\n' + "}\n") + + with self.assertRaises(FileParseException): + JsonFileManager.read(config_path) + + def test_read_json_file_path_not_valid(self): + config_dir = "path/that/doesnt/exist" + config_path = Path(config_dir, "samconfig.json") + + config_doc = JsonFileManager.read(config_path) + + self.assertEqual(config_doc, {}) + + def test_write_json(self): + config_dir = tempfile.gettempdir() + config_path = Path(config_dir, "samconfig.json") + json_doc = { + "version": 0.1, + "config_env": {"topic2": {"parameters": {"word": "clarity"}}}, + COMMENT_KEY: "This is a comment", + } + + JsonFileManager.write(json_doc, config_path) + + txt = config_path.read_text() + self.assertIn('"version": 0.1', txt) + self.assertIn('"config_env": {', txt) + self.assertIn('"topic2": {', txt) + self.assertIn('"parameters": {', txt) + self.assertIn('"word": "clarity"', txt) + self.assertIn(f'"{COMMENT_KEY}": "This is a comment"', txt) + + def test_dont_write_json_if_empty(self): + config_dir = tempfile.gettempdir() + config_path = Path(config_dir, 
"samconfig.json") + config_path.write_text("nothing to see here\n") + json_doc = {} + + JsonFileManager.write(json_doc, config_path) + + self.assertEqual(config_path.read_text(), "nothing to see here\n") + + def test_write_json_file_bad_path(self): + config_path = Path("path/to/some", "file_that_doesnt_exist.json") + + with self.assertRaises(FileNotFoundError): + JsonFileManager.write({"key": "value"}, config_path) + + def test_json_put_comment(self): + json_doc = {"version": 0.1, "config_env": {"topic1": {"parameters": {"word": "clarity"}}}} + + json_doc = JsonFileManager.put_comment(json_doc, "This is a comment") + + txt = json.dumps(json_doc) + self.assertIn(f'"{COMMENT_KEY}": "This is a comment"', txt) diff --git a/tests/unit/lib/samconfig/test_samconfig.py b/tests/unit/lib/samconfig/test_samconfig.py index 7a86e6f97d..c58f0709a6 100644 --- a/tests/unit/lib/samconfig/test_samconfig.py +++ b/tests/unit/lib/samconfig/test_samconfig.py @@ -1,10 +1,21 @@ import os from pathlib import Path +from unittest.mock import patch +from parameterized import parameterized +import tempfile from unittest import TestCase -from samcli.lib.config.exceptions import SamConfigVersionException -from samcli.lib.config.samconfig import SamConfig, DEFAULT_CONFIG_FILE_NAME, DEFAULT_GLOBAL_CMDNAME, DEFAULT_ENV +from samcli.lib.config.exceptions import SamConfigFileReadException, SamConfigVersionException +from samcli.lib.config.file_manager import FILE_MANAGER_MAPPER, JsonFileManager, TomlFileManager, YamlFileManager +from samcli.lib.config.samconfig import ( + DEFAULT_CONFIG_FILE, + SamConfig, + DEFAULT_CONFIG_FILE_NAME, + DEFAULT_GLOBAL_CMDNAME, + DEFAULT_ENV, +) from samcli.lib.config.version import VERSION_KEY, SAM_CONFIG_VERSION +from samcli.lib.telemetry.event import Event from samcli.lib.utils import osutils @@ -182,27 +193,27 @@ def test_check_sanity(self): def test_check_version_non_supported_type(self): self._setup_config() - self.samconfig.document.remove(VERSION_KEY) - self.samconfig.document.add(VERSION_KEY, "aadeff") + self.samconfig.document.pop(VERSION_KEY) + self.samconfig.document.update({VERSION_KEY: "aadeff"}) with self.assertRaises(SamConfigVersionException): self.samconfig.sanity_check() def test_check_version_no_version_exists(self): self._setup_config() - self.samconfig.document.remove(VERSION_KEY) + self.samconfig.document.pop(VERSION_KEY) with self.assertRaises(SamConfigVersionException): self.samconfig.sanity_check() def test_check_version_float(self): self._setup_config() - self.samconfig.document.remove(VERSION_KEY) - self.samconfig.document.add(VERSION_KEY, 0.2) + self.samconfig.document.pop(VERSION_KEY) + self.samconfig.document.update({VERSION_KEY: 0.2}) self.samconfig.sanity_check() def test_write_config_file_non_standard_version(self): self._setup_config() - self.samconfig.document.remove(VERSION_KEY) - self.samconfig.document.add(VERSION_KEY, 0.2) + self.samconfig.document.pop(VERSION_KEY) + self.samconfig.document.update({VERSION_KEY: 0.2}) self.samconfig.put(cmd_names=["local", "start", "api"], section="parameters", key="skip_pull_image", value=True) self.samconfig.sanity_check() self.assertEqual(self.samconfig.document.get(VERSION_KEY), 0.2) @@ -210,7 +221,7 @@ def test_write_config_file_non_standard_version(self): def test_write_config_file_will_create_the_file_if_not_exist(self): with osutils.mkdir_temp(ignore_errors=True) as tempdir: non_existing_dir = os.path.join(tempdir, "non-existing-dir") - non_existing_file = "non-existing-file" + non_existing_file = 
"non-existing-file.toml" samconfig = SamConfig(config_dir=non_existing_dir, filename=non_existing_file) self.assertFalse(samconfig.exists()) @@ -221,3 +232,84 @@ def test_write_config_file_will_create_the_file_if_not_exist(self): samconfig.put(cmd_names=["any", "command"], section="any-section", key="any-key", value="any-value") samconfig.flush() self.assertTrue(samconfig.exists()) + + def test_passed_filename_used(self): + config_path = Path(self.config_dir, "myconfigfile.toml") + + self.assertFalse(config_path.exists()) + + self.samconfig = SamConfig(self.config_dir, filename="myconfigfile.toml") + self.samconfig.put( # put some config options so it creates the file + cmd_names=["any", "command"], section="section", key="key", value="value" + ) + self.samconfig.flush() + + self.assertTrue(config_path.exists()) + self.assertFalse(Path(self.config_dir, DEFAULT_CONFIG_FILE_NAME).exists()) + + def test_config_uses_default_if_none_provided(self): + self.samconfig = SamConfig(self.config_dir) + self.samconfig.put( # put some config options so it creates the file + cmd_names=["any", "command"], section="section", key="key", value="value" + ) + self.samconfig.flush() + + self.assertTrue(Path(self.config_dir, DEFAULT_CONFIG_FILE_NAME).exists()) + + def test_config_priority(self): + config_files = [] + extensions_in_priority = list(FILE_MANAGER_MAPPER.keys()) # priority by order in dict + for extension in extensions_in_priority: + filename = DEFAULT_CONFIG_FILE + extension + config = SamConfig(self.config_dir, filename=filename) + config.put( # put some config options so it creates the file + cmd_names=["any", "command"], section="section", key="key", value="value" + ) + config.flush() + config_files.append(config) + + while extensions_in_priority: + config = SamConfig(self.config_dir) + next_priority = extensions_in_priority.pop(0) + self.assertEqual(config.filepath, Path(self.config_dir, DEFAULT_CONFIG_FILE + next_priority)) + os.remove(config.path()) + + +class TestSamConfigFileManager(TestCase): + def test_file_manager_not_declared(self): + config_dir = tempfile.gettempdir() + config_path = Path(config_dir, "samconfig") + + with self.assertRaises(SamConfigFileReadException): + SamConfig(config_path, filename="samconfig") + + def test_file_manager_unsupported(self): + config_dir = tempfile.gettempdir() + config_path = Path(config_dir, "samconfig.jpeg") + + with self.assertRaises(SamConfigFileReadException): + SamConfig(config_path, filename="samconfig.jpeg") + + @parameterized.expand( + [ + ("samconfig.toml", TomlFileManager, ".toml"), + ("samconfig.yaml", YamlFileManager, ".yaml"), + ("samconfig.yml", YamlFileManager, ".yml"), + # ("samconfig.json", JsonFileManager, ".json"), + ] + ) + @patch("samcli.lib.telemetry.event.EventTracker.track_event") + def test_file_manager(self, filename, expected_file_manager, expected_extension, track_mock): + config_dir = tempfile.gettempdir() + config_path = Path(config_dir, filename) + tracked_events = [] + + def mock_tracker(name, value): # when track_event is called, just append the Event to our list + tracked_events.append(Event(name, value)) + + track_mock.side_effect = mock_tracker + + samconfig = SamConfig(config_path, filename=filename) + + self.assertIs(samconfig.file_manager, expected_file_manager) + self.assertIn(Event("SamConfigFileExtension", expected_extension), tracked_events) From 78219627c1ff878dd2d26a2085d307af596ec39d Mon Sep 17 00:00:00 2001 From: Leonardo Gama Date: Wed, 12 Jul 2023 15:40:42 -0700 Subject: [PATCH 20/32] Generate sam 
deploy schema --- schema/samcli.json | 170 ++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 168 insertions(+), 2 deletions(-) diff --git a/schema/samcli.json b/schema/samcli.json index cda45947bf..ad8289fb37 100644 --- a/schema/samcli.json +++ b/schema/samcli.json @@ -143,9 +143,175 @@ "properties": { "parameters": { "title": "Parameters for the deploy command", - "description": "Available parameters for the deploy command:\n", + "description": "Available parameters for the deploy command:\n* config_env:\nEnvironment name specifying default parameter values in the configuration file.\n* config_file:\nConfiguration file containing default parameter values.\n* guided:\nSpecify this flag to allow SAM CLI to guide you through the deployment using guided prompts.\n* template_file:\nAWS SAM template which references built artifacts for resources in the template. (if applicable)\n* no_execute_changeset:\nIndicates whether to execute the change set. Specify this flag to view stack changes before executing the change set.\n* fail_on_empty_changeset:\nSpecify whether AWS SAM CLI should return a non-zero exit code if there are no changes to be made to the stack. Defaults to a non-zero exit code.\n* confirm_changeset:\nPrompt to confirm if the computed changeset is to be deployed by SAM CLI.\n* disable_rollback:\nPreserves the state of previously provisioned resources when an operation fails.\n* on_failure:\nProvide an action to determine what will happen when a stack fails to create. Three actions are available:\n\n- ROLLBACK: This will rollback a stack to a previous known good state.\n\n- DELETE: The stack will rollback to a previous state if one exists, otherwise the stack will be deleted.\n\n- DO_NOTHING: The stack will not rollback or delete, this is the same as disabling rollback.\n\nDefault behaviour is ROLLBACK.\n\n\n\nThis option is mutually exclusive with --disable-rollback/--no-disable-rollback. You can provide\n--on-failure or --disable-rollback/--no-disable-rollback but not both at the same time.\n* stack_name:\nName of the AWS CloudFormation stack.\n* s3_bucket:\nAWS S3 bucket where artifacts referenced in the template are uploaded.\n* image_repository:\nAWS ECR repository URI where artifacts referenced in the template are uploaded.\n* image_repositories:\nMapping of Function Logical ID to AWS ECR Repository URI.\n\nExample: Function_Logical_ID=ECR_Repo_Uri\nThis option can be specified multiple times.\n* force_upload:\nIndicates whether to override existing files in the S3 bucket. Specify this flag to upload artifacts even if they match existing artifacts in the S3 bucket.\n* s3_prefix:\nPrefix name that is added to the artifact's name when it is uploaded to the AWS S3 bucket.\n* kms_key_id:\nThe ID of an AWS KMS key that is used to encrypt artifacts that are at rest in the AWS S3 bucket.\n* role_arn:\nARN of an IAM role that AWS Cloudformation assumes when executing a deployment change set.\n* use_json:\nIndicates whether to use JSON as the format for the output AWS CloudFormation template. YAML is used by default.\n* resolve_s3:\nAutomatically resolve AWS S3 bucket for non-guided deployments. Enabling this option will also create a managed default AWS S3 bucket for you. If one does not provide a --s3-bucket value, the managed bucket will be used. Do not use --guided with this option.\n* resolve_image_repos:\nAutomatically create and delete ECR repositories for image-based functions in non-guided deployments. 
A companion stack containing ECR repos for each function will be deployed along with the template stack. Automatically created image repositories will be deleted if the corresponding functions are removed.\n* metadata:\nMap of metadata to attach to ALL the artifacts that are referenced in the template.\n* notification_arns:\nARNs of SNS topics that AWS Cloudformation associates with the stack.\n* tags:\nList of tags to associate with the stack.\n* parameter_overrides:\nString that contains AWS CloudFormation parameter overrides encoded as key=value pairs.\n* signing_profiles:\nA string that contains Code Sign configuration parameters as FunctionOrLayerNameToSign=SigningProfileName:SigningProfileOwner Since signing profile owner is optional, it could also be written as FunctionOrLayerNameToSign=SigningProfileName\n* no_progressbar:\nDoes not showcase a progress bar when uploading artifacts to S3 and pushing docker images to ECR\n* capabilities:\nList of capabilities that one must specify before AWS Cloudformation can create certain stacks.\n\nAccepted Values: CAPABILITY_IAM, CAPABILITY_NAMED_IAM, CAPABILITY_RESOURCE_POLICY, CAPABILITY_AUTO_EXPAND.\n\nLearn more at: https://docs.aws.amazon.com/serverlessrepo/latest/devguide/acknowledging-application-capabilities.html\n* profile:\nSelect a specific profile from your credential file to get AWS credentials.\n* region:\nSet the AWS Region of the service. (e.g. us-east-1)\n* beta_features:\nEnable/Disable beta features.\n* debug:\nTurn on debug logging to print debug message generated by AWS SAM CLI and display timestamps.", "type": "object", - "properties": {} + "properties": { + "config_env": { + "title": "config_env", + "type": "string", + "description": "Environment name specifying default parameter values in the configuration file.", + "default": "default" + }, + "config_file": { + "title": "config_file", + "type": "string", + "description": "Configuration file containing default parameter values.", + "default": "samconfig.toml" + }, + "guided": { + "title": "guided", + "type": "boolean", + "description": "Specify this flag to allow SAM CLI to guide you through the deployment using guided prompts." + }, + "template_file": { + "title": "template_file", + "type": "string", + "description": "AWS SAM template which references built artifacts for resources in the template. (if applicable)", + "default": "template.[yaml|yml|json]" + }, + "no_execute_changeset": { + "title": "no_execute_changeset", + "type": "boolean", + "description": "Indicates whether to execute the change set. Specify this flag to view stack changes before executing the change set." + }, + "fail_on_empty_changeset": { + "title": "fail_on_empty_changeset", + "type": "boolean", + "description": "Specify whether AWS SAM CLI should return a non-zero exit code if there are no changes to be made to the stack. Defaults to a non-zero exit code.", + "default": true + }, + "confirm_changeset": { + "title": "confirm_changeset", + "type": "boolean", + "description": "Prompt to confirm if the computed changeset is to be deployed by SAM CLI." + }, + "disable_rollback": { + "title": "disable_rollback", + "type": "boolean", + "description": "Preserves the state of previously provisioned resources when an operation fails." + }, + "on_failure": { + "title": "on_failure", + "type": "string", + "description": "Provide an action to determine what will happen when a stack fails to create. 
Three actions are available:\n\n- ROLLBACK: This will rollback a stack to a previous known good state.\n\n- DELETE: The stack will rollback to a previous state if one exists, otherwise the stack will be deleted.\n\n- DO_NOTHING: The stack will not rollback or delete, this is the same as disabling rollback.\n\nDefault behaviour is ROLLBACK.\n\n\n\nThis option is mutually exclusive with --disable-rollback/--no-disable-rollback. You can provide\n--on-failure or --disable-rollback/--no-disable-rollback but not both at the same time.", + "default": "ROLLBACK", + "enum": [ + "ROLLBACK", + "DELETE", + "DO_NOTHING" + ] + }, + "stack_name": { + "title": "stack_name", + "type": "string", + "description": "Name of the AWS CloudFormation stack." + }, + "s3_bucket": { + "title": "s3_bucket", + "type": "string", + "description": "AWS S3 bucket where artifacts referenced in the template are uploaded." + }, + "image_repository": { + "title": "image_repository", + "type": "string", + "description": "AWS ECR repository URI where artifacts referenced in the template are uploaded." + }, + "image_repositories": { + "title": "image_repositories", + "type": "string", + "description": "Mapping of Function Logical ID to AWS ECR Repository URI.\n\nExample: Function_Logical_ID=ECR_Repo_Uri\nThis option can be specified multiple times." + }, + "force_upload": { + "title": "force_upload", + "type": "boolean", + "description": "Indicates whether to override existing files in the S3 bucket. Specify this flag to upload artifacts even if they match existing artifacts in the S3 bucket." + }, + "s3_prefix": { + "title": "s3_prefix", + "type": "string", + "description": "Prefix name that is added to the artifact's name when it is uploaded to the AWS S3 bucket." + }, + "kms_key_id": { + "title": "kms_key_id", + "type": "string", + "description": "The ID of an AWS KMS key that is used to encrypt artifacts that are at rest in the AWS S3 bucket." + }, + "role_arn": { + "title": "role_arn", + "type": "string", + "description": "ARN of an IAM role that AWS Cloudformation assumes when executing a deployment change set." + }, + "use_json": { + "title": "use_json", + "type": "boolean", + "description": "Indicates whether to use JSON as the format for the output AWS CloudFormation template. YAML is used by default." + }, + "resolve_s3": { + "title": "resolve_s3", + "type": "boolean", + "description": "Automatically resolve AWS S3 bucket for non-guided deployments. Enabling this option will also create a managed default AWS S3 bucket for you. If one does not provide a --s3-bucket value, the managed bucket will be used. Do not use --guided with this option." + }, + "resolve_image_repos": { + "title": "resolve_image_repos", + "type": "boolean", + "description": "Automatically create and delete ECR repositories for image-based functions in non-guided deployments. A companion stack containing ECR repos for each function will be deployed along with the template stack. Automatically created image repositories will be deleted if the corresponding functions are removed." + }, + "metadata": { + "title": "metadata", + "type": "string", + "description": "Map of metadata to attach to ALL the artifacts that are referenced in the template." + }, + "notification_arns": { + "title": "notification_arns", + "type": "array", + "description": "ARNs of SNS topics that AWS Cloudformation associates with the stack." + }, + "tags": { + "title": "tags", + "type": "string", + "description": "List of tags to associate with the stack." 
+ }, + "parameter_overrides": { + "title": "parameter_overrides", + "type": "string", + "description": "String that contains AWS CloudFormation parameter overrides encoded as key=value pairs." + }, + "signing_profiles": { + "title": "signing_profiles", + "type": "string", + "description": "A string that contains Code Sign configuration parameters as FunctionOrLayerNameToSign=SigningProfileName:SigningProfileOwner Since signing profile owner is optional, it could also be written as FunctionOrLayerNameToSign=SigningProfileName" + }, + "no_progressbar": { + "title": "no_progressbar", + "type": "boolean", + "description": "Does not showcase a progress bar when uploading artifacts to S3 and pushing docker images to ECR" + }, + "capabilities": { + "title": "capabilities", + "type": "array", + "description": "List of capabilities that one must specify before AWS Cloudformation can create certain stacks.\n\nAccepted Values: CAPABILITY_IAM, CAPABILITY_NAMED_IAM, CAPABILITY_RESOURCE_POLICY, CAPABILITY_AUTO_EXPAND.\n\nLearn more at: https://docs.aws.amazon.com/serverlessrepo/latest/devguide/acknowledging-application-capabilities.html" + }, + "profile": { + "title": "profile", + "type": "string", + "description": "Select a specific profile from your credential file to get AWS credentials." + }, + "region": { + "title": "region", + "type": "string", + "description": "Set the AWS Region of the service. (e.g. us-east-1)" + }, + "beta_features": { + "title": "beta_features", + "type": "boolean", + "description": "Enable/Disable beta features." + }, + "debug": { + "title": "debug", + "type": "boolean", + "description": "Turn on debug logging to print debug message generated by AWS SAM CLI and display timestamps." + } + } } }, "required": [ From f358403f2ee92b4e3ef8630aaa8629d753a358f0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 12 Jul 2023 23:16:43 +0000 Subject: [PATCH 21/32] chore(deps): bump cryptography from 41.0.1 to 41.0.2 in /requirements (#5496) Bumps [cryptography](https://github.com/pyca/cryptography) from 41.0.1 to 41.0.2. - [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/41.0.1...41.0.2) --- updated-dependencies: - dependency-name: cryptography dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/reproducible-linux.txt | 44 ++++++++++++++++------------- requirements/reproducible-mac.txt | 44 ++++++++++++++++------------- 2 files changed, 48 insertions(+), 40 deletions(-) diff --git a/requirements/reproducible-linux.txt b/requirements/reproducible-linux.txt index a5fd1d3868..90da5d0c6a 100644 --- a/requirements/reproducible-linux.txt +++ b/requirements/reproducible-linux.txt @@ -212,26 +212,30 @@ cookiecutter==2.1.1 \ --hash=sha256:9f3ab027cec4f70916e28f03470bdb41e637a3ad354b4d65c765d93aad160022 \ --hash=sha256:f3982be8d9c53dac1261864013fdec7f83afd2e42ede6f6dd069c5e149c540d5 # via aws-sam-cli (setup.py) -cryptography==41.0.1 \ - --hash=sha256:059e348f9a3c1950937e1b5d7ba1f8e968508ab181e75fc32b879452f08356db \ - --hash=sha256:1a5472d40c8f8e91ff7a3d8ac6dfa363d8e3138b961529c996f3e2df0c7a411a \ - --hash=sha256:1a8e6c2de6fbbcc5e14fd27fb24414507cb3333198ea9ab1258d916f00bc3039 \ - --hash=sha256:1fee5aacc7367487b4e22484d3c7e547992ed726d14864ee33c0176ae43b0d7c \ - --hash=sha256:5d092fdfedaec4cbbffbf98cddc915ba145313a6fdaab83c6e67f4e6c218e6f3 \ - --hash=sha256:5f0ff6e18d13a3de56f609dd1fd11470918f770c6bd5d00d632076c727d35485 \ - --hash=sha256:7bfc55a5eae8b86a287747053140ba221afc65eb06207bedf6e019b8934b477c \ - --hash=sha256:7fa01527046ca5facdf973eef2535a27fec4cb651e4daec4d043ef63f6ecd4ca \ - --hash=sha256:8dde71c4169ec5ccc1087bb7521d54251c016f126f922ab2dfe6649170a3b8c5 \ - --hash=sha256:8f4ab7021127a9b4323537300a2acfb450124b2def3756f64dc3a3d2160ee4b5 \ - --hash=sha256:948224d76c4b6457349d47c0c98657557f429b4e93057cf5a2f71d603e2fc3a3 \ - --hash=sha256:9a6c7a3c87d595608a39980ebaa04d5a37f94024c9f24eb7d10262b92f739ddb \ - --hash=sha256:b46e37db3cc267b4dea1f56da7346c9727e1209aa98487179ee8ebed09d21e43 \ - --hash=sha256:b4ceb5324b998ce2003bc17d519080b4ec8d5b7b70794cbd2836101406a9be31 \ - --hash=sha256:cb33ccf15e89f7ed89b235cff9d49e2e62c6c981a6061c9c8bb47ed7951190bc \ - --hash=sha256:d198820aba55660b4d74f7b5fd1f17db3aa5eb3e6893b0a41b75e84e4f9e0e4b \ - --hash=sha256:d34579085401d3f49762d2f7d6634d6b6c2ae1242202e860f4d26b046e3a1006 \ - --hash=sha256:eb8163f5e549a22888c18b0d53d6bb62a20510060a22fd5a995ec8a05268df8a \ - --hash=sha256:f73bff05db2a3e5974a6fd248af2566134d8981fd7ab012e5dd4ddb1d9a70699 +cryptography==41.0.2 \ + --hash=sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711 \ + --hash=sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7 \ + --hash=sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd \ + --hash=sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e \ + --hash=sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58 \ + --hash=sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0 \ + --hash=sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d \ + --hash=sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83 \ + --hash=sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831 \ + --hash=sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766 \ + --hash=sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b \ + --hash=sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c \ + --hash=sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182 \ + --hash=sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f \ + 
--hash=sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa \ + --hash=sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4 \ + --hash=sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a \ + --hash=sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2 \ + --hash=sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76 \ + --hash=sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5 \ + --hash=sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee \ + --hash=sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f \ + --hash=sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14 # via pyopenssl dateparser==1.1.8 \ --hash=sha256:070b29b5bbf4b1ec2cd51c96ea040dc68a614de703910a91ad1abba18f9f379f \ diff --git a/requirements/reproducible-mac.txt b/requirements/reproducible-mac.txt index 0fb4cfca06..7846601df3 100644 --- a/requirements/reproducible-mac.txt +++ b/requirements/reproducible-mac.txt @@ -230,26 +230,30 @@ cookiecutter==2.1.1 \ --hash=sha256:9f3ab027cec4f70916e28f03470bdb41e637a3ad354b4d65c765d93aad160022 \ --hash=sha256:f3982be8d9c53dac1261864013fdec7f83afd2e42ede6f6dd069c5e149c540d5 # via aws-sam-cli (setup.py) -cryptography==41.0.1 \ - --hash=sha256:059e348f9a3c1950937e1b5d7ba1f8e968508ab181e75fc32b879452f08356db \ - --hash=sha256:1a5472d40c8f8e91ff7a3d8ac6dfa363d8e3138b961529c996f3e2df0c7a411a \ - --hash=sha256:1a8e6c2de6fbbcc5e14fd27fb24414507cb3333198ea9ab1258d916f00bc3039 \ - --hash=sha256:1fee5aacc7367487b4e22484d3c7e547992ed726d14864ee33c0176ae43b0d7c \ - --hash=sha256:5d092fdfedaec4cbbffbf98cddc915ba145313a6fdaab83c6e67f4e6c218e6f3 \ - --hash=sha256:5f0ff6e18d13a3de56f609dd1fd11470918f770c6bd5d00d632076c727d35485 \ - --hash=sha256:7bfc55a5eae8b86a287747053140ba221afc65eb06207bedf6e019b8934b477c \ - --hash=sha256:7fa01527046ca5facdf973eef2535a27fec4cb651e4daec4d043ef63f6ecd4ca \ - --hash=sha256:8dde71c4169ec5ccc1087bb7521d54251c016f126f922ab2dfe6649170a3b8c5 \ - --hash=sha256:8f4ab7021127a9b4323537300a2acfb450124b2def3756f64dc3a3d2160ee4b5 \ - --hash=sha256:948224d76c4b6457349d47c0c98657557f429b4e93057cf5a2f71d603e2fc3a3 \ - --hash=sha256:9a6c7a3c87d595608a39980ebaa04d5a37f94024c9f24eb7d10262b92f739ddb \ - --hash=sha256:b46e37db3cc267b4dea1f56da7346c9727e1209aa98487179ee8ebed09d21e43 \ - --hash=sha256:b4ceb5324b998ce2003bc17d519080b4ec8d5b7b70794cbd2836101406a9be31 \ - --hash=sha256:cb33ccf15e89f7ed89b235cff9d49e2e62c6c981a6061c9c8bb47ed7951190bc \ - --hash=sha256:d198820aba55660b4d74f7b5fd1f17db3aa5eb3e6893b0a41b75e84e4f9e0e4b \ - --hash=sha256:d34579085401d3f49762d2f7d6634d6b6c2ae1242202e860f4d26b046e3a1006 \ - --hash=sha256:eb8163f5e549a22888c18b0d53d6bb62a20510060a22fd5a995ec8a05268df8a \ - --hash=sha256:f73bff05db2a3e5974a6fd248af2566134d8981fd7ab012e5dd4ddb1d9a70699 +cryptography==41.0.2 \ + --hash=sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711 \ + --hash=sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7 \ + --hash=sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd \ + --hash=sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e \ + --hash=sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58 \ + --hash=sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0 \ + --hash=sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d \ + 
--hash=sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83 \ + --hash=sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831 \ + --hash=sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766 \ + --hash=sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b \ + --hash=sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c \ + --hash=sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182 \ + --hash=sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f \ + --hash=sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa \ + --hash=sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4 \ + --hash=sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a \ + --hash=sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2 \ + --hash=sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76 \ + --hash=sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5 \ + --hash=sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee \ + --hash=sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f \ + --hash=sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14 # via pyopenssl dateparser==1.1.8 \ --hash=sha256:070b29b5bbf4b1ec2cd51c96ea040dc68a614de703910a91ad1abba18f9f379f \ From 24a541c244cecd0c8ad65e73210a2da222c0448a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 13 Jul 2023 00:01:31 +0000 Subject: [PATCH 22/32] chore(deps): bump rich from 13.3.3 to 13.4.2 in /requirements (#5495) Bumps [rich](https://github.com/Textualize/rich) from 13.3.3 to 13.4.2. - [Release notes](https://github.com/Textualize/rich/releases) - [Changelog](https://github.com/Textualize/rich/blob/master/CHANGELOG.md) - [Commits](https://github.com/Textualize/rich/compare/v13.3.3...v13.4.2) --- updated-dependencies: - dependency-name: rich dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/reproducible-linux.txt | 6 +++--- requirements/reproducible-mac.txt | 6 +++--- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 38532b4714..7185ed7a05 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -17,7 +17,7 @@ serverlessrepo==0.1.10 aws_lambda_builders==1.34.0 tomlkit==0.11.8 watchdog==2.1.2 -rich~=13.3.3 +rich~=13.4.2 pyopenssl~=23.2.0 # Pin to <4.18 to until SAM-T no longer uses RefResolver jsonschema<4.18 diff --git a/requirements/reproducible-linux.txt b/requirements/reproducible-linux.txt index 90da5d0c6a..8c58260391 100644 --- a/requirements/reproducible-linux.txt +++ b/requirements/reproducible-linux.txt @@ -611,9 +611,9 @@ requests==2.31.0 \ # aws-sam-cli (setup.py) # cookiecutter # docker -rich==13.3.3 \ - --hash=sha256:540c7d6d26a1178e8e8b37e9ba44573a3cd1464ff6348b99ee7061b95d1c6333 \ - --hash=sha256:dc84400a9d842b3a9c5ff74addd8eb798d155f36c1c91303888e0a66850d2a15 +rich==13.4.2 \ + --hash=sha256:8f87bc7ee54675732fa66a05ebfe489e27264caeeff3728c945d25971b6485ec \ + --hash=sha256:d653d6bccede5844304c605d5aac802c7cf9621efd700b46c7ec2b51ea914898 # via aws-sam-cli (setup.py) ruamel-yaml==0.17.32 \ --hash=sha256:23cd2ed620231677564646b0c6a89d138b6822a0d78656df7abda5879ec4f447 \ diff --git a/requirements/reproducible-mac.txt b/requirements/reproducible-mac.txt index 7846601df3..294907ed47 100644 --- a/requirements/reproducible-mac.txt +++ b/requirements/reproducible-mac.txt @@ -641,9 +641,9 @@ requests==2.31.0 \ # aws-sam-cli (setup.py) # cookiecutter # docker -rich==13.3.3 \ - --hash=sha256:540c7d6d26a1178e8e8b37e9ba44573a3cd1464ff6348b99ee7061b95d1c6333 \ - --hash=sha256:dc84400a9d842b3a9c5ff74addd8eb798d155f36c1c91303888e0a66850d2a15 +rich==13.4.2 \ + --hash=sha256:8f87bc7ee54675732fa66a05ebfe489e27264caeeff3728c945d25971b6485ec \ + --hash=sha256:d653d6bccede5844304c605d5aac802c7cf9621efd700b46c7ec2b51ea914898 # via aws-sam-cli (setup.py) ruamel-yaml==0.17.32 \ --hash=sha256:23cd2ed620231677564646b0c6a89d138b6822a0d78656df7abda5879ec4f447 \ From eb18ec1aeeb3e1955112eda89fc4e89a6f688196 Mon Sep 17 00:00:00 2001 From: berzi <32619123+berzi@users.noreply.github.com> Date: Thu, 13 Jul 2023 02:04:22 +0200 Subject: [PATCH 23/32] Allow greater versions of typing_extensions (#5489) * allow greater versions of typing_extensions * Update requirements/base.txt --------- Co-authored-by: Wing Fung Lau <4760060+hawflau@users.noreply.github.com> --- requirements/base.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/base.txt b/requirements/base.txt index 7185ed7a05..09b9c02e71 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -23,7 +23,7 @@ pyopenssl~=23.2.0 jsonschema<4.18 # Needed for supporting Protocol in Python 3.7, Protocol class became public with python3.8 -typing_extensions~=4.4.0 +typing_extensions>=4.4.0,<5 # NOTE: regex is not a direct dependency of SAM CLI, exclude version 2021.10.8 due to not working on M1 Mac - https://github.com/mrabarnett/mrab-regex/issues/399 regex!=2021.10.8 From c59e3d510b83be76f963b34bd94a5b727ffc0022 Mon Sep 17 00:00:00 2001 From: Lucas <12496191+lucashuy@users.noreply.github.com> Date: Thu, 13 Jul 2023 00:17:18 -0700 Subject: [PATCH 24/32] chore: bump version to 1.91.0 (#5506) --- samcli/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/samcli/__init__.py b/samcli/__init__.py index 1fea4bd55f..15e5e50faa 100644 --- a/samcli/__init__.py +++ b/samcli/__init__.py @@ -2,4 +2,4 @@ SAM CLI version """ -__version__ = "1.90.0" +__version__ = "1.91.0" From eedd1ee15435c9ce6815e051a93f270d5929c962 Mon Sep 17 00:00:00 2001 From: Daniel Mil <84205762+mildaniel@users.noreply.github.com> Date: Thu, 13 Jul 2023 11:37:03 -0700 Subject: [PATCH 25/32] fix: Pin click to be < 8.1.4 temporarily (#5511) --- requirements/base.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements/base.txt b/requirements/base.txt index 09b9c02e71..f72df5f046 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -1,6 +1,7 @@ chevron~=0.12 # 8.1.4 of Click has an issue with the typing breaking the linter - https://github.com/pallets/click/issues/2558 -click~=8.0,!=8.1.4 +# Allow click to be greater than 8.1.4 when https://github.com/pallets/click/pull/2565 is released. +click~=8.0,<8.1.4 Flask<2.3 #Need to add latest lambda changes which will return invoke mode details boto3>=1.26.109,==1.* From 36f8bf970df7a8072eef58ca76405c40e1cc1c90 Mon Sep 17 00:00:00 2001 From: Lucas <12496191+lucashuy@users.noreply.github.com> Date: Thu, 13 Jul 2023 12:05:40 -0700 Subject: [PATCH 26/32] fix: Revert UTF-8 fixes #5485 and #5427 (#5512) * Revert "fix: use StringIO instead of BytesIO with StreamWriter (#5485)" This reverts commit 577d0d4e4671413f10cdaa5944ee53de91242841. * Revert "fix(invoke): Write in UTF-8 string instead of bytes (#5427)" This reverts commit d5ce6d5c25054e7a3ca06c6c2e89f113b7c61842. --- .../local/cli_common/invoke_context.py | 8 ++-- .../commands/remote/remote_invoke_context.py | 4 +- samcli/lib/docker/log_streamer.py | 28 ++++++----- samcli/lib/package/ecr_uploader.py | 4 +- samcli/lib/package/s3_uploader.py | 2 +- samcli/lib/utils/osutils.py | 13 ++--- samcli/lib/utils/stream_writer.py | 25 ++-------- samcli/lib/utils/subprocess_utils.py | 4 +- samcli/local/apigw/local_apigw_service.py | 4 +- samcli/local/docker/container.py | 47 ++++--------------- samcli/local/docker/lambda_image.py | 11 ++--- samcli/local/docker/manager.py | 6 +-- .../local_lambda_invoke_service.py | 2 +- samcli/local/services/base_local_service.py | 4 +- .../local/invoke/test_integrations_cli.py | 21 --------- .../local/cli_common/test_invoke_context.py | 2 +- tests/unit/lib/utils/test_osutils.py | 8 ++++ tests/unit/lib/utils/test_stream_writer.py | 11 ++--- tests/unit/lib/utils/test_subprocess_utils.py | 13 +++-- tests/unit/local/docker/test_container.py | 29 ++++++------ tests/unit/local/docker/test_lambda_image.py | 9 ++-- tests/unit/local/docker/test_manager.py | 24 +++------- .../local/services/test_base_local_service.py | 18 +++---- 23 files changed, 112 insertions(+), 185 deletions(-) diff --git a/samcli/commands/local/cli_common/invoke_context.py b/samcli/commands/local/cli_common/invoke_context.py index 9b001e4d20..3ffcae914d 100644 --- a/samcli/commands/local/cli_common/invoke_context.py +++ b/samcli/commands/local/cli_common/invoke_context.py @@ -7,7 +7,7 @@ import os from enum import Enum from pathlib import Path -from typing import Any, Dict, List, Optional, TextIO, Tuple, Type, cast +from typing import IO, Any, Dict, List, Optional, Tuple, Type, cast from samcli.commands._utils.template import TemplateFailedParsingException, TemplateNotFoundException from samcli.commands.exceptions import ContainersInitializationException @@ -195,7 +195,7 @@ def __init__( self._stacks: List[Stack] = None # type: ignore self._env_vars_value: 
Optional[Dict] = None self._container_env_vars_value: Optional[Dict] = None - self._log_file_handle: Optional[TextIO] = None + self._log_file_handle: Optional[IO] = None self._debug_context: Optional[DebugContext] = None self._layers_downloader: Optional[LayerDownloader] = None self._container_manager: Optional[ContainerManager] = None @@ -487,7 +487,7 @@ def _get_env_vars_value(filename: Optional[str]) -> Optional[Dict]: ) from ex @staticmethod - def _setup_log_file(log_file: Optional[str]) -> Optional[TextIO]: + def _setup_log_file(log_file: Optional[str]) -> Optional[IO]: """ Open a log file if necessary and return the file handle. This will create a file if it does not exist @@ -497,7 +497,7 @@ def _setup_log_file(log_file: Optional[str]) -> Optional[TextIO]: if not log_file: return None - return open(log_file, "w") + return open(log_file, "wb") @staticmethod def _get_debug_context( diff --git a/samcli/commands/remote/remote_invoke_context.py b/samcli/commands/remote/remote_invoke_context.py index d1294983bc..b710df8410 100644 --- a/samcli/commands/remote/remote_invoke_context.py +++ b/samcli/commands/remote/remote_invoke_context.py @@ -242,7 +242,7 @@ class DefaultRemoteInvokeResponseConsumer(RemoteInvokeConsumer[RemoteInvokeRespo _stream_writer: StreamWriter def consume(self, remote_invoke_response: RemoteInvokeResponse) -> None: - self._stream_writer.write_bytes(cast(str, remote_invoke_response.response).encode()) + self._stream_writer.write(cast(str, remote_invoke_response.response).encode()) @dataclass @@ -254,4 +254,4 @@ class DefaultRemoteInvokeLogConsumer(RemoteInvokeConsumer[RemoteInvokeLogOutput] _stream_writer: StreamWriter def consume(self, remote_invoke_response: RemoteInvokeLogOutput) -> None: - self._stream_writer.write_bytes(remote_invoke_response.log_output.encode()) + self._stream_writer.write(remote_invoke_response.log_output.encode()) diff --git a/samcli/lib/docker/log_streamer.py b/samcli/lib/docker/log_streamer.py index 3bb437781a..b013459bae 100644 --- a/samcli/lib/docker/log_streamer.py +++ b/samcli/lib/docker/log_streamer.py @@ -47,21 +47,23 @@ def stream_progress(self, logs: docker.APIClient.logs): else: curr_log_line_id = ids[_id] change_cursor_count = len(ids) - curr_log_line_id - self._stream.write_str( + self._stream.write( self._cursor_up_formatter.cursor_format(change_cursor_count) - + self._cursor_left_formatter.cursor_format() + + self._cursor_left_formatter.cursor_format(), + encode=True, ) self._stream_write(_id, status, stream, progress, error) if _id: - self._stream.write_str( + self._stream.write( self._cursor_down_formatter.cursor_format(change_cursor_count) - + self._cursor_left_formatter.cursor_format() + + self._cursor_left_formatter.cursor_format(), + encode=True, ) - self._stream.write_str(os.linesep) + self._stream.write(os.linesep, encode=True) - def _stream_write(self, _id: str, status: str, stream: str, progress: str, error: str): + def _stream_write(self, _id: str, status: str, stream: bytes, progress: str, error: str): """ Write stream information to stderr, if the stream information contains a log id, use the carriage return character to rewrite that particular line. @@ -78,14 +80,14 @@ def _stream_write(self, _id: str, status: str, stream: str, progress: str, error # NOTE(sriram-mv): Required for the purposes of when the cursor overflows existing terminal buffer. 
if not stream: - self._stream.write_str(os.linesep) - self._stream.write_str( - self._cursor_up_formatter.cursor_format() + self._cursor_left_formatter.cursor_format() + self._stream.write(os.linesep, encode=True) + self._stream.write( + self._cursor_up_formatter.cursor_format() + self._cursor_left_formatter.cursor_format(), encode=True ) - self._stream.write_str(self._cursor_clear_formatter.cursor_format()) + self._stream.write(self._cursor_clear_formatter.cursor_format(), encode=True) if not _id: - self._stream.write_str(stream) - self._stream.write_str(status) + self._stream.write(stream, encode=True) + self._stream.write(status, encode=True) else: - self._stream.write_str(f"\r{_id}: {status} {progress}") + self._stream.write(f"\r{_id}: {status} {progress}", encode=True) diff --git a/samcli/lib/package/ecr_uploader.py b/samcli/lib/package/ecr_uploader.py index 0393596b39..f2d4371407 100644 --- a/samcli/lib/package/ecr_uploader.py +++ b/samcli/lib/package/ecr_uploader.py @@ -2,8 +2,8 @@ Client for uploading packaged artifacts to ecr """ import base64 +import io import logging -from io import StringIO from typing import Dict import botocore @@ -94,7 +94,7 @@ def upload(self, image, resource_name): else: # we need to wait till the image got pushed to ecr, without this workaround sam sync for template # contains image always fail, because the provided ecr uri is not exist. - _log_streamer = LogStreamer(stream=StreamWriter(stream=StringIO(), auto_flush=True)) + _log_streamer = LogStreamer(stream=StreamWriter(stream=io.BytesIO(), auto_flush=True)) _log_streamer.stream_progress(push_logs) except (BuildError, APIError, LogStreamError) as ex: diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index 95981e92ed..fe141ada51 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -265,4 +265,4 @@ def on_progress(self, bytes_transferred, **kwargs): ) sys.stderr.flush() if int(percentage) == 100: # noqa: PLR2004 - sys.stderr.write(os.linesep) + sys.stderr.write("\n") diff --git a/samcli/lib/utils/osutils.py b/samcli/lib/utils/osutils.py index f722a8deac..d53dc9ffb5 100644 --- a/samcli/lib/utils/osutils.py +++ b/samcli/lib/utils/osutils.py @@ -1,7 +1,6 @@ """ Common OS utilities """ -import io import logging import os import shutil @@ -79,7 +78,7 @@ def rmtree_if_exists(path: Union[str, Path]): shutil.rmtree(path_obj) -def stdout() -> io.TextIOWrapper: +def stdout(): """ Returns the stdout as a byte stream in a Py2/PY3 compatible manner @@ -88,12 +87,10 @@ def stdout() -> io.TextIOWrapper: io.BytesIO Byte stream of Stdout """ - # Note(jfuss): sys.stdout is a type typing.TextIO but are initialized to - # io.TextIOWrapper. To make mypy and typing play well, tell mypy to ignore. - return sys.stdout # type:ignore[return-value] + return sys.stdout.buffer -def stderr() -> io.TextIOWrapper: +def stderr(): """ Returns the stderr as a byte stream in a Py2/PY3 compatible manner @@ -102,9 +99,7 @@ def stderr() -> io.TextIOWrapper: io.BytesIO Byte stream of stderr """ - # Note(jfuss): sys.stderr is a type typing.TextIO but are initialized to - # io.TextIOWrapper. To make mypy and typing play well, tell mypy to ignore. 
- return sys.stderr # type:ignore[return-value] + return sys.stderr.buffer def remove(path): diff --git a/samcli/lib/utils/stream_writer.py b/samcli/lib/utils/stream_writer.py index 99f72c1036..1fc62fa690 100644 --- a/samcli/lib/utils/stream_writer.py +++ b/samcli/lib/utils/stream_writer.py @@ -1,11 +1,10 @@ """ This class acts like a wrapper around output streams to provide any flexibility with output we need """ -from typing import TextIO, Union class StreamWriter: - def __init__(self, stream: TextIO, auto_flush: bool = False): + def __init__(self, stream, auto_flush=False): """ Instatiates new StreamWriter to the specified stream @@ -20,33 +19,19 @@ def __init__(self, stream: TextIO, auto_flush: bool = False): self._auto_flush = auto_flush @property - def stream(self) -> TextIO: + def stream(self): return self._stream - def write_bytes(self, output: Union[bytes, bytearray]): + def write(self, output, encode=False): """ Writes specified text to the underlying stream Parameters ---------- output bytes-like object - Bytes to write into buffer + Bytes to write """ - self._stream.buffer.write(output) - - if self._auto_flush: - self._stream.flush() - - def write_str(self, output: str): - """ - Writes specified text to the underlying stream - - Parameters - ---------- - output string object - String to write - """ - self._stream.write(output) + self._stream.write(output.encode() if encode else output) if self._auto_flush: self._stream.flush() diff --git a/samcli/lib/utils/subprocess_utils.py b/samcli/lib/utils/subprocess_utils.py index 1937a44eeb..e08ec12e49 100644 --- a/samcli/lib/utils/subprocess_utils.py +++ b/samcli/lib/utils/subprocess_utils.py @@ -34,7 +34,7 @@ def default_loading_pattern(stream_writer: Optional[StreamWriter] = None, loadin How frequently to generate the pattern """ stream_writer = stream_writer or StreamWriter(sys.stderr) - stream_writer.write_str(".") + stream_writer.write(".") stream_writer.flush() sleep(loading_pattern_rate) @@ -96,7 +96,7 @@ def _print_loading_pattern(): return_code = process.wait() keep_printing = False - stream_writer.write_str(os.linesep) + stream_writer.write(os.linesep) stream_writer.flush() process_stderr = _check_and_convert_stream_to_string(process.stderr) diff --git a/samcli/local/apigw/local_apigw_service.py b/samcli/local/apigw/local_apigw_service.py index b80b1fc2c2..f979b2e9a3 100644 --- a/samcli/local/apigw/local_apigw_service.py +++ b/samcli/local/apigw/local_apigw_service.py @@ -4,7 +4,7 @@ import json import logging from datetime import datetime -from io import StringIO +from io import BytesIO from time import time from typing import Any, Dict, List, Optional @@ -605,7 +605,7 @@ def _invoke_lambda_function(self, lambda_function_name: str, event: dict) -> str str A string containing the output from the Lambda function """ - with StringIO() as stdout: + with BytesIO() as stdout: event_str = json.dumps(event, sort_keys=True) stdout_writer = StreamWriter(stdout, auto_flush=True) diff --git a/samcli/local/docker/container.py b/samcli/local/docker/container.py index 7082d521f1..e70f7c2a1f 100644 --- a/samcli/local/docker/container.py +++ b/samcli/local/docker/container.py @@ -1,8 +1,6 @@ """ Representation of a generic Docker container """ -import io -import json import logging import os import pathlib @@ -11,7 +9,7 @@ import tempfile import threading import time -from typing import Iterator, Optional, Tuple, Union +from typing import Optional import docker import requests @@ -19,7 +17,6 @@ from samcli.lib.constants import 
DOCKER_MIN_API_VERSION from samcli.lib.utils.retry import retry -from samcli.lib.utils.stream_writer import StreamWriter from samcli.lib.utils.tar import extract_tarfile from samcli.local.docker.effective_user import ROOT_USER_ID, EffectiveUser @@ -317,7 +314,7 @@ def start(self, input_data=None): real_container.start() @retry(exc=requests.exceptions.RequestException, exc_raise=ContainerResponseException) - def wait_for_http_response(self, name, event, stdout) -> str: + def wait_for_http_response(self, name, event, stdout): # TODO(sriram-mv): `aws-lambda-rie` is in a mode where the function_name is always "function" # NOTE(sriram-mv): There is a connection timeout set on the http call to `aws-lambda-rie`, however there is not # a read time out for the response received from the server. @@ -327,7 +324,7 @@ def wait_for_http_response(self, name, event, stdout) -> str: data=event.encode("utf-8"), timeout=(self.RAPID_CONNECTION_TIMEOUT, None), ) - return json.dumps(json.loads(resp.content), ensure_ascii=False) + stdout.write(resp.content) def wait_for_result(self, full_path, event, stdout, stderr, start_timer=None): # NOTE(sriram-mv): Let logging happen in its own thread, so that a http request can be sent. @@ -347,21 +344,11 @@ def wait_for_result(self, full_path, event, stdout, stderr, start_timer=None): # start the timer for function timeout right before executing the function, as waiting for the socket # can take some time timer = start_timer() if start_timer else None - response = self.wait_for_http_response(full_path, event, stdout) + self.wait_for_http_response(full_path, event, stdout) if timer: timer.cancel() - # NOTE(jfuss): Adding a sleep after we get a response from the contianer but before we - # we write the response to ensure the last thing written to stdout is the container response - time.sleep(1) - stdout.write_str(response) - stdout.flush() - - def wait_for_logs( - self, - stdout: Optional[Union[StreamWriter, io.BytesIO, io.TextIOWrapper]] = None, - stderr: Optional[Union[StreamWriter, io.BytesIO, io.TextIOWrapper]] = None, - ): + def wait_for_logs(self, stdout=None, stderr=None): # Return instantly if we don't have to fetch any logs if not stdout and not stderr: return @@ -373,6 +360,7 @@ def wait_for_logs( # Fetch both stdout and stderr streams from Docker as a single iterator. 
logs_itr = real_container.attach(stream=True, logs=True, demux=True) + self._write_container_output(logs_itr, stdout=stdout, stderr=stderr) def _wait_for_socket_connection(self) -> None: @@ -423,11 +411,7 @@ def copy(self, from_container_path, to_host_path) -> None: extract_tarfile(file_obj=fp, unpack_dir=to_host_path) @staticmethod - def _write_container_output( - output_itr: Iterator[Tuple[bytes, bytes]], - stdout: Optional[Union[StreamWriter, io.BytesIO, io.TextIOWrapper]] = None, - stderr: Optional[Union[StreamWriter, io.BytesIO, io.TextIOWrapper]] = None, - ): + def _write_container_output(output_itr, stdout=None, stderr=None): """ Based on the data returned from the Container output, via the iterator, write it to the appropriate streams @@ -446,26 +430,13 @@ def _write_container_output( # Iterator returns a tuple of (stdout, stderr) for stdout_data, stderr_data in output_itr: if stdout_data and stdout: - Container._handle_data_writing(stdout, stdout_data) + stdout.write(stdout_data) if stderr_data and stderr: - Container._handle_data_writing(stderr, stderr_data) - + stderr.write(stderr_data) except Exception as ex: LOG.debug("Failed to get the logs from the container", exc_info=ex) - @staticmethod - def _handle_data_writing(output_stream: Union[StreamWriter, io.BytesIO, io.TextIOWrapper], output_data: bytes): - if isinstance(output_stream, StreamWriter): - output_stream.write_bytes(output_data) - output_stream.flush() - - if isinstance(output_stream, io.BytesIO): - output_stream.write(output_data) - - if isinstance(output_stream, io.TextIOWrapper): - output_stream.buffer.write(output_data) - @property def network_id(self): """ diff --git a/samcli/local/docker/lambda_image.py b/samcli/local/docker/lambda_image.py index 4dff0d820f..23f0a770d9 100644 --- a/samcli/local/docker/lambda_image.py +++ b/samcli/local/docker/lambda_image.py @@ -3,7 +3,6 @@ """ import hashlib import logging -import os import platform import re import sys @@ -227,7 +226,7 @@ def build(self, runtime, packagetype, image, layers, architecture, stream=None, or not runtime ): stream_writer = stream or StreamWriter(sys.stderr) - stream_writer.write_str("Building image...") + stream_writer.write("Building image...") stream_writer.flush() self._build_image( image if image else base_image, rapid_image, downloaded_layers, architecture, stream=stream_writer @@ -338,15 +337,15 @@ def set_item_permission(tar_info): platform=get_docker_platform(architecture), ) for log in resp_stream: - stream_writer.write_str(".") + stream_writer.write(".") stream_writer.flush() if "error" in log: - stream_writer.write_str(os.linesep) + stream_writer.write("\n") LOG.exception("Failed to build Docker Image") raise ImageBuildException("Error building docker image: {}".format(log["error"])) - stream_writer.write_str(os.linesep) + stream_writer.write("\n") except (docker.errors.BuildError, docker.errors.APIError) as ex: - stream_writer.write_str(os.linesep) + stream_writer.write("\n") LOG.exception("Failed to build Docker Image") raise ImageBuildException("Building Image failed.") from ex finally: diff --git a/samcli/local/docker/manager.py b/samcli/local/docker/manager.py index 6975828cd1..a035003bb0 100644 --- a/samcli/local/docker/manager.py +++ b/samcli/local/docker/manager.py @@ -168,16 +168,16 @@ def pull_image(self, image_name, tag=None, stream=None): raise DockerImagePullFailedException(str(ex)) from ex # io streams, especially StringIO, work only with unicode strings - stream_writer.write_str("\nFetching {}:{} Docker container 
image...".format(image_name, tag)) + stream_writer.write("\nFetching {}:{} Docker container image...".format(image_name, tag)) # Each line contains information on progress of the pull. Each line is a JSON string for _ in result_itr: # For every line, print a dot to show progress - stream_writer.write_str(".") + stream_writer.write(".") stream_writer.flush() # We are done. Go to the next line - stream_writer.write_str("\n") + stream_writer.write("\n") def has_image(self, image_name): """ diff --git a/samcli/local/lambda_service/local_lambda_invoke_service.py b/samcli/local/lambda_service/local_lambda_invoke_service.py index 546066449c..c6d7506fb2 100644 --- a/samcli/local/lambda_service/local_lambda_invoke_service.py +++ b/samcli/local/lambda_service/local_lambda_invoke_service.py @@ -162,7 +162,7 @@ def _invoke_request_handler(self, function_name): request_data = request_data.decode("utf-8") - stdout_stream = io.StringIO() + stdout_stream = io.BytesIO() stdout_stream_writer = StreamWriter(stdout_stream, auto_flush=True) try: diff --git a/samcli/local/services/base_local_service.py b/samcli/local/services/base_local_service.py index 671d48888c..fcb7cd95ae 100644 --- a/samcli/local/services/base_local_service.py +++ b/samcli/local/services/base_local_service.py @@ -82,7 +82,7 @@ def service_response(body, headers, status_code): class LambdaOutputParser: @staticmethod - def get_lambda_output(stdout_stream: io.StringIO) -> Tuple[str, bool]: + def get_lambda_output(stdout_stream: io.BytesIO) -> Tuple[str, bool]: """ This method will extract read the given stream and return the response from Lambda function separated out from any log statements it might have outputted. Logs end up in the stdout stream if the Lambda function @@ -100,7 +100,7 @@ def get_lambda_output(stdout_stream: io.StringIO) -> Tuple[str, bool]: bool If the response is an error/exception from the container """ - lambda_response = stdout_stream.getvalue() + lambda_response = stdout_stream.getvalue().decode("utf-8") # When the Lambda Function returns an Error/Exception, the output is added to the stdout of the container. From # our perspective, the container returned some value, which is not always true. 
Since the output is the only diff --git a/tests/integration/local/invoke/test_integrations_cli.py b/tests/integration/local/invoke/test_integrations_cli.py index 70711459d6..3604fc4010 100644 --- a/tests/integration/local/invoke/test_integrations_cli.py +++ b/tests/integration/local/invoke/test_integrations_cli.py @@ -291,27 +291,6 @@ def test_invoke_returns_expected_result_when_no_event_given(self): self.assertEqual(process.returncode, 0) self.assertEqual("{}", process_stdout.decode("utf-8")) - @pytest.mark.flaky(reruns=3) - def test_invoke_returns_utf8(self): - command_list = InvokeIntegBase.get_command_list( - "EchoEventFunction", template_path=self.template_path, event_path=self.event_utf8_path - ) - - process = Popen(command_list, stdout=PIPE) - try: - stdout, _ = process.communicate(timeout=TIMEOUT) - except TimeoutExpired: - process.kill() - raise - - process_stdout = stdout.strip() - - with open(self.event_utf8_path) as f: - expected_output = json.dumps(json.load(f), ensure_ascii=False) - - self.assertEqual(process.returncode, 0) - self.assertEqual(expected_output, process_stdout.decode("utf-8")) - @pytest.mark.flaky(reruns=3) def test_invoke_with_env_using_parameters(self): command_list = InvokeIntegBase.get_command_list( diff --git a/tests/unit/commands/local/cli_common/test_invoke_context.py b/tests/unit/commands/local/cli_common/test_invoke_context.py index a9ba9d8b22..3cab08c82a 100644 --- a/tests/unit/commands/local/cli_common/test_invoke_context.py +++ b/tests/unit/commands/local/cli_common/test_invoke_context.py @@ -1106,7 +1106,7 @@ def test_must_open_file_for_writing(self): with patch("samcli.commands.local.cli_common.invoke_context.open", m): InvokeContext._setup_log_file(filename) - m.assert_called_with(filename, "w") + m.assert_called_with(filename, "wb") class TestInvokeContext_get_debug_context(TestCase): diff --git a/tests/unit/lib/utils/test_osutils.py b/tests/unit/lib/utils/test_osutils.py index 6f7a6cf4df..bf4794f2c4 100644 --- a/tests/unit/lib/utils/test_osutils.py +++ b/tests/unit/lib/utils/test_osutils.py @@ -34,7 +34,9 @@ def test_raises_on_cleanup_failure(self, rmdir_mock): @patch("os.rmdir") def test_handles_ignore_error_case(self, rmdir_mock): rmdir_mock.side_effect = OSError("fail") + dir_name = None with osutils.mkdir_temp(ignore_errors=True) as tempdir: + dir_name = tempdir self.assertTrue(os.path.exists(tempdir)) @@ -42,6 +44,9 @@ class Test_stderr(TestCase): def test_must_return_sys_stderr(self): expected_stderr = sys.stderr + if sys.version_info.major > 2: + expected_stderr = sys.stderr.buffer + self.assertEqual(expected_stderr, osutils.stderr()) @@ -49,6 +54,9 @@ class Test_stdout(TestCase): def test_must_return_sys_stdout(self): expected_stdout = sys.stdout + if sys.version_info.major > 2: + expected_stdout = sys.stdout.buffer + self.assertEqual(expected_stdout, osutils.stdout()) diff --git a/tests/unit/lib/utils/test_stream_writer.py b/tests/unit/lib/utils/test_stream_writer.py index a6875b59da..cb48955850 100644 --- a/tests/unit/lib/utils/test_stream_writer.py +++ b/tests/unit/lib/utils/test_stream_writer.py @@ -1,7 +1,6 @@ """ Tests for StreamWriter """ -import io from unittest import TestCase @@ -12,13 +11,13 @@ class TestStreamWriter(TestCase): def test_must_write_to_stream(self): - buffer = b"something" + buffer = "something" stream_mock = Mock() writer = StreamWriter(stream_mock) - writer.write_bytes(buffer) + writer.write(buffer) - stream_mock.buffer.write.assert_called_once_with(buffer) + stream_mock.write.assert_called_once_with(buffer) 
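# Illustration only, not part of this patch: the reverted StreamWriter API exercised by
# these tests exposes a single write() that takes bytes as-is, or a str when encode=True,
# and auto_flush=True flushes the underlying stream after every write.
import sys

from samcli.lib.utils.stream_writer import StreamWriter

writer = StreamWriter(sys.stdout.buffer, auto_flush=True)
writer.write(b"already-encoded bytes\n")
writer.write("text encoded on write\n", encode=True)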
def test_must_flush_underlying_stream(self): stream_mock = Mock() @@ -32,7 +31,7 @@ def test_auto_flush_must_be_off_by_default(self): stream_mock = Mock() writer = StreamWriter(stream_mock) - writer.write_str("something") + writer.write("something") stream_mock.flush.assert_not_called() @@ -47,6 +46,6 @@ def test_when_auto_flush_on_flush_after_each_write(self): writer = StreamWriter(stream_mock, True) for line in lines: - writer.write_str(line) + writer.write(line) flush_mock.assert_called_once_with() flush_mock.reset_mock() diff --git a/tests/unit/lib/utils/test_subprocess_utils.py b/tests/unit/lib/utils/test_subprocess_utils.py index a9d39afdd2..969f06085b 100644 --- a/tests/unit/lib/utils/test_subprocess_utils.py +++ b/tests/unit/lib/utils/test_subprocess_utils.py @@ -11,7 +11,6 @@ from parameterized import parameterized from unittest.mock import patch, Mock, call, ANY -from samcli.lib.utils.stream_writer import StreamWriter from samcli.lib.utils.subprocess_utils import ( default_loading_pattern, invoke_subprocess_with_loading_pattern, @@ -65,7 +64,7 @@ def test_loader_stream_uses_passed_in_stdout( @patch("samcli.lib.utils.subprocess_utils.Popen") def test_loader_raises_exception_non_zero_exit_code(self, patched_Popen): standard_error = "an error has occurred" - mock_stream_writer = Mock(spec=StreamWriter) + mock_stream_writer = Mock() mock_process = Mock() mock_process.returncode = 1 mock_process.stdout = None @@ -75,7 +74,7 @@ def test_loader_raises_exception_non_zero_exit_code(self, patched_Popen): with self.assertRaises(LoadingPatternError) as ex: invoke_subprocess_with_loading_pattern({"args": ["ls"]}, mock_pattern, mock_stream_writer) self.assertIn(standard_error, ex.exception.message) - mock_stream_writer.write_str.assert_called_once_with(os.linesep) + mock_stream_writer.write.assert_called_once_with(os.linesep) mock_stream_writer.flush.assert_called_once_with() @patch("samcli.lib.utils.subprocess_utils.Popen") @@ -96,19 +95,19 @@ def test_loader_raises_exception_bad_process(self, patched_Popen): @patch("samcli.lib.utils.subprocess_utils.StreamWriter") def test_default_pattern_default_stream_writer(self, patched_stream_writer): - stream_writer_mock = Mock(spec=StreamWriter) + stream_writer_mock = Mock() patched_stream_writer.return_value = stream_writer_mock default_loading_pattern(loading_pattern_rate=0.01) patched_stream_writer.assert_called_once_with(sys.stderr) - stream_writer_mock.write_str.assert_called_once_with(".") + stream_writer_mock.write.assert_called_once_with(".") stream_writer_mock.flush.assert_called_once_with() @patch("samcli.lib.utils.subprocess_utils.StreamWriter") def test_default_pattern(self, patched_stream_writer): - stream_writer_mock = Mock(spec=StreamWriter) + stream_writer_mock = Mock() default_loading_pattern(stream_writer_mock, 0.01) patched_stream_writer.assert_not_called() - stream_writer_mock.write_str.assert_called_once_with(".") + stream_writer_mock.write.assert_called_once_with(".") stream_writer_mock.flush.assert_called_once_with() @parameterized.expand([("hello".encode("utf-8"), "hello"), ("hello", "hello")]) diff --git a/tests/unit/local/docker/test_container.py b/tests/unit/local/docker/test_container.py index c3a5671ea3..14f292c0ce 100644 --- a/tests/unit/local/docker/test_container.py +++ b/tests/unit/local/docker/test_container.py @@ -9,7 +9,6 @@ from requests import RequestException from samcli.lib.utils.packagetype import IMAGE -from samcli.lib.utils.stream_writer import StreamWriter from samcli.local.docker.container import Container, 
ContainerResponseException, ContainerConnectionTimeoutException @@ -697,17 +696,17 @@ def test_wait_for_result_waits_for_socket_before_post_request(self, patched_time self.assertEqual(mock_requests.post.call_count, 0) def test_write_container_output_successful(self): - stdout_mock = Mock(spec=StreamWriter) - stderr_mock = Mock(spec=StreamWriter) + stdout_mock = Mock() + stderr_mock = Mock() def _output_iterator(): - yield b"Hello", None - yield None, b"World" + yield "Hello", None + yield None, "World" raise ValueError("The pipe has been ended.") Container._write_container_output(_output_iterator(), stdout_mock, stderr_mock) - stdout_mock.assert_has_calls([call.write_bytes(b"Hello")]) - stderr_mock.assert_has_calls([call.write_bytes(b"World")]) + stdout_mock.assert_has_calls([call.write("Hello")]) + stderr_mock.assert_has_calls([call.write("World")]) class TestContainer_wait_for_logs(TestCase): @@ -761,33 +760,33 @@ class TestContainer_write_container_output(TestCase): def setUp(self): self.output_itr = [(b"stdout1", None), (None, b"stderr1"), (b"stdout2", b"stderr2"), (None, None)] - self.stdout_mock = Mock(spec=StreamWriter) - self.stderr_mock = Mock(spec=StreamWriter) + self.stdout_mock = Mock() + self.stderr_mock = Mock() def test_must_write_stdout_and_stderr_data(self): # All the invalid frames must be ignored Container._write_container_output(self.output_itr, stdout=self.stdout_mock, stderr=self.stderr_mock) - self.stdout_mock.write_bytes.assert_has_calls([call(b"stdout1"), call(b"stdout2")]) + self.stdout_mock.write.assert_has_calls([call(b"stdout1"), call(b"stdout2")]) - self.stderr_mock.write_bytes.assert_has_calls([call(b"stderr1"), call(b"stderr2")]) + self.stderr_mock.write.assert_has_calls([call(b"stderr1"), call(b"stderr2")]) def test_must_write_only_stderr(self): # All the invalid frames must be ignored Container._write_container_output(self.output_itr, stdout=None, stderr=self.stderr_mock) - self.stdout_mock.write_bytes.assert_not_called() + self.stdout_mock.write.assert_not_called() - self.stderr_mock.write_bytes.assert_has_calls([call(b"stderr1"), call(b"stderr2")]) + self.stderr_mock.write.assert_has_calls([call(b"stderr1"), call(b"stderr2")]) def test_must_write_only_stdout(self): Container._write_container_output(self.output_itr, stdout=self.stdout_mock, stderr=None) - self.stdout_mock.write_bytes.assert_has_calls([call(b"stdout1"), call(b"stdout2")]) + self.stdout_mock.write.assert_has_calls([call(b"stdout1"), call(b"stdout2")]) - self.stderr_mock.write_bytes.assert_not_called() # stderr must never be called + self.stderr_mock.write.assert_not_called() # stderr must never be called class TestContainer_wait_for_socket_connection(TestCase): diff --git a/tests/unit/local/docker/test_lambda_image.py b/tests/unit/local/docker/test_lambda_image.py index 03b57be804..1e8f936d98 100644 --- a/tests/unit/local/docker/test_lambda_image.py +++ b/tests/unit/local/docker/test_lambda_image.py @@ -1,3 +1,4 @@ +import io import tempfile from unittest import TestCase @@ -270,7 +271,7 @@ def test_force_building_image_that_doesnt_already_exists( docker_client_mock.images.get.side_effect = ImageNotFound("image not found") docker_client_mock.images.list.return_value = [] - stream = Mock() + stream = io.StringIO() lambda_image = LambdaImage(layer_downloader_mock, False, True, docker_client=docker_client_mock) actual_image_id = lambda_image.build( @@ -310,7 +311,7 @@ def test_force_building_image_on_daemon_404( docker_client_mock.images.get.side_effect = NotFound("image not found") 
docker_client_mock.images.list.return_value = [] - stream = Mock() + stream = io.StringIO() lambda_image = LambdaImage(layer_downloader_mock, False, True, docker_client=docker_client_mock) actual_image_id = lambda_image.build( @@ -350,7 +351,7 @@ def test_docker_distribution_api_error_on_daemon_api_error( docker_client_mock.images.get.side_effect = APIError("error from docker daemon") docker_client_mock.images.list.return_value = [] - stream = Mock() + stream = io.StringIO() lambda_image = LambdaImage(layer_downloader_mock, False, True, docker_client=docker_client_mock) with self.assertRaises(DockerDistributionAPIError): @@ -376,7 +377,7 @@ def test_not_force_building_image_that_doesnt_already_exists( docker_client_mock.images.get.side_effect = ImageNotFound("image not found") docker_client_mock.images.list.return_value = [] - stream = Mock() + stream = io.StringIO() lambda_image = LambdaImage(layer_downloader_mock, False, False, docker_client=docker_client_mock) actual_image_id = lambda_image.build( diff --git a/tests/unit/local/docker/test_manager.py b/tests/unit/local/docker/test_manager.py index 4cb42bbd02..ada69903ea 100644 --- a/tests/unit/local/docker/test_manager.py +++ b/tests/unit/local/docker/test_manager.py @@ -1,6 +1,8 @@ """ Tests container manager """ + +import io import importlib from unittest import TestCase from unittest.mock import Mock, patch, MagicMock, ANY, call @@ -216,29 +218,17 @@ def setUp(self): self.manager = ContainerManager(docker_client=self.mock_docker_client) def test_must_pull_and_print_progress_dots(self): - stream = Mock() + stream = io.StringIO() pull_result = [1, 2, 3, 4, 5, 6, 7, 8, 9, 0] self.mock_docker_client.api.pull.return_value = pull_result - expected_stream_calls = [ - call(f"\nFetching {self.image_name}:latest Docker container image..."), - call("."), - call("."), - call("."), - call("."), - call("."), - call("."), - call("."), - call("."), - call("."), - call("."), - call("\n"), - ] + expected_stream_output = "\nFetching {}:latest Docker container image...{}\n".format( + self.image_name, "." 
* len(pull_result) # Progress bar will print one dot per response from pull API + ) self.manager.pull_image(self.image_name, stream=stream) self.mock_docker_client.api.pull.assert_called_with(self.image_name, stream=True, decode=True, tag="latest") - - stream.write_str.assert_has_calls(expected_stream_calls) + self.assertEqual(stream.getvalue(), expected_stream_output) def test_must_raise_if_image_not_found(self): msg = "some error" diff --git a/tests/unit/local/services/test_base_local_service.py b/tests/unit/local/services/test_base_local_service.py index 34bc44c193..fec13e25c9 100644 --- a/tests/unit/local/services/test_base_local_service.py +++ b/tests/unit/local/services/test_base_local_service.py @@ -66,17 +66,17 @@ def test_create_returns_not_implemented(self): class TestLambdaOutputParser(TestCase): @parameterized.expand( [ - param("with mixed data and json response", 'data\n{"a": "b"}', 'data\n{"a": "b"}'), - param("with response as string", "response", "response"), - param("with json response only", '{"a": "b"}', '{"a": "b"}'), - param("with one new line and json", '\n{"a": "b"}', '\n{"a": "b"}'), - param("with response only as string", "this is the response line", "this is the response line"), - param("with whitespaces", 'data\n{"a": "b"} \n\n\n', 'data\n{"a": "b"} \n\n\n'), - param("with empty data", "", ""), - param("with just new lines", "\n\n", "\n\n"), + param("with mixed data and json response", b'data\n{"a": "b"}', 'data\n{"a": "b"}'), + param("with response as string", b"response", "response"), + param("with json response only", b'{"a": "b"}', '{"a": "b"}'), + param("with one new line and json", b'\n{"a": "b"}', '\n{"a": "b"}'), + param("with response only as string", b"this is the response line", "this is the response line"), + param("with whitespaces", b'data\n{"a": "b"} \n\n\n', 'data\n{"a": "b"} \n\n\n'), + param("with empty data", b"", ""), + param("with just new lines", b"\n\n", "\n\n"), param( "with whitespaces", - "\n \n \n", + b"\n \n \n", "\n \n \n", ), ] From dd96898d0caf7f767964d21d9b42594f1c07f982 Mon Sep 17 00:00:00 2001 From: Daniel Mil <84205762+mildaniel@users.noreply.github.com> Date: Thu, 13 Jul 2023 13:25:04 -0700 Subject: [PATCH 27/32] fix: Bump timeout (#5510) --- .../testdata/esbuild-datadog-integration/template.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/end_to_end/testdata/esbuild-datadog-integration/template.yaml b/tests/end_to_end/testdata/esbuild-datadog-integration/template.yaml index 3341557f3e..8f055237df 100644 --- a/tests/end_to_end/testdata/esbuild-datadog-integration/template.yaml +++ b/tests/end_to_end/testdata/esbuild-datadog-integration/template.yaml @@ -16,6 +16,7 @@ Resources: Properties: Handler: /opt/nodejs/node_modules/datadog-lambda-js/handler.handler Runtime: nodejs18.x + Timeout: 15 Environment: Variables: DD_LAMBDA_HANDLER: main.lambdaHandler From 84184943aeec2423f1071a88297f7722edf6cbf2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 13 Jul 2023 22:10:01 +0000 Subject: [PATCH 28/32] chore(deps): bump botocore from 1.29.135 to 1.31.2 in /requirements (#5497) * chore(deps): bump botocore from 1.29.135 to 1.31.2 in /requirements Bumps [botocore](https://github.com/boto/botocore) from 1.29.135 to 1.31.2. 
- [Changelog](https://github.com/boto/botocore/blob/develop/CHANGELOG.rst) - [Commits](https://github.com/boto/botocore/compare/1.29.135...1.31.2) --- updated-dependencies: - dependency-name: botocore dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Update reproducible-linux.txt * Update reproducible-mac.txt * Update reproducible-linux.txt * Update reproducible-mac.txt * group all dependabot changes in one PR --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Wing Fung Lau <4760060+hawflau@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/dev.txt | 4 ++-- requirements/reproducible-linux.txt | 30 ++++++++++++++--------------- requirements/reproducible-mac.txt | 30 ++++++++++++++--------------- 4 files changed, 33 insertions(+), 33 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index f72df5f046..57a67e078b 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -32,4 +32,4 @@ regex!=2021.10.8 tzlocal==3.0 #Adding cfn-lint dependency for SAM validate -cfn-lint~=0.77.9 +cfn-lint~=0.78.1 diff --git a/requirements/dev.txt b/requirements/dev.txt index 2421d05095..55f6a457c1 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -8,7 +8,7 @@ pytest-cov==4.1.0 # mypy adds new rules in new minor versions, which could cause our PR check to fail # here we fix its version and upgrade it manually in the future mypy==1.3.0 -boto3-stubs[apigateway,cloudformation,ecr,iam,lambda,s3,schemas,secretsmanager,signer,stepfunctions,sts,xray]==1.26.131 +boto3-stubs[apigateway,cloudformation,ecr,iam,lambda,s3,schemas,secretsmanager,signer,stepfunctions,sts,xray]==1.28.2 types-pywin32==306.0.0.2 types-PyYAML==6.0.12 types-chevron==0.14.2.4 @@ -19,7 +19,7 @@ types-colorama==0.4.15.11 types-dateparser==1.1.4.9 types-docutils==0.20.0.1 types-jsonschema==4.17.0.8 -types-pyOpenSSL==23.2.0.0 +types-pyOpenSSL==23.2.0.1 types-requests==2.31.0.1 types-urllib3==1.26.25.13 diff --git a/requirements/reproducible-linux.txt b/requirements/reproducible-linux.txt index 8c58260391..8e993c68de 100644 --- a/requirements/reproducible-linux.txt +++ b/requirements/reproducible-linux.txt @@ -29,16 +29,16 @@ binaryornot==0.4.4 \ --hash=sha256:359501dfc9d40632edc9fac890e19542db1a287bbcfa58175b66658392018061 \ --hash=sha256:b8b71173c917bddcd2c16070412e369c3ed7f0528926f70cac18a6c97fd563e4 # via cookiecutter -boto3==1.26.134 \ - --hash=sha256:2da4a4caa789312ae73d29be9d3e79ce3328e3aaf7e9de0da6f243455ad3aae6 \ - --hash=sha256:a49b47621c71adfa952127222809ae50867ae4fd249bb932eb1a98519baefa40 +boto3==1.28.2 \ + --hash=sha256:0d53fe604dc30edded21906bc56b30a7684f0715f4f6897307d53f8184997368 \ + --hash=sha256:9933e40dc9ac72deac45cecce2df020e3bf8d0d537538d2b361c17d1cee807cc # via # aws-sam-cli (setup.py) # aws-sam-translator # serverlessrepo -botocore==1.29.135 \ - --hash=sha256:06502a4473924ef60ac0de908385a5afab9caee6c5b49cf6a330fab0d76ddf5f \ - --hash=sha256:0c61d4e5e04fe5329fa65da6b31492ef9d0d5174d72fc2af69de2ed0f87804ca +botocore==1.31.2 \ + --hash=sha256:67a475bec9e52d495a358b34e219ef7f62907e83b87e5bc712528f998bd46dab \ + --hash=sha256:d368ac0b58e2b9025b9c397e4a4f86d71788913ee619263506885a866a4f6811 # via # boto3 # s3transfer @@ -112,9 +112,9 @@ cffi==1.15.1 \ --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 # via 
cryptography -cfn-lint==0.77.9 \ - --hash=sha256:7c1e631b723b521234d92d4081934291b256dba28d723ddb7ff105215fe40020 \ - --hash=sha256:f95b503f7465ee1f2f89ddf32289ea03a517f08c366bb8e6a5d6773a11e5a1aa +cfn-lint==0.78.1 \ + --hash=sha256:2dacb19d5f70c0d49f466302507707cfa4914f65b8fc9310ae3771a273cec044 \ + --hash=sha256:46118362b2e13b79ba3ae6b3c28b7df5fcd437c06f5bcc3384d13a2defdb7d06 # via aws-sam-cli (setup.py) chardet==5.1.0 \ --hash=sha256:0d62712b956bc154f85fb0a266e2a3c5913c2967e00348701b32411d6def31e5 \ @@ -472,9 +472,9 @@ python-slugify==8.0.1 \ --hash=sha256:70ca6ea68fe63ecc8fa4fcf00ae651fc8a5d02d93dcd12ae6d4fc7ca46c4d395 \ --hash=sha256:ce0d46ddb668b3be82f4ed5e503dbc33dd815d83e2eb6824211310d3fb172a27 # via cookiecutter -pytz==2023.2 \ - --hash=sha256:8a8baaf1e237175b02f5c751eea67168043a749c843989e2b3015aa1ad9db68b \ - --hash=sha256:a27dcf612c05d2ebde626f7d506555f10dfc815b3eddccfaadfc7d99b11c9a07 +pytz==2023.3 \ + --hash=sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588 \ + --hash=sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb # via dateparser pyyaml==5.4.1 \ --hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \ @@ -689,9 +689,9 @@ tomlkit==0.11.8 \ --hash=sha256:8c726c4c202bdb148667835f68d68780b9a003a9ec34167b6c673b38eff2a171 \ --hash=sha256:9330fc7faa1db67b541b28e62018c17d20be733177d290a13b24c62d1614e0c3 # via aws-sam-cli (setup.py) -typing-extensions==4.4.0 \ - --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ - --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e +typing-extensions==4.7.1 \ + --hash=sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36 \ + --hash=sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2 # via # aws-sam-cli (setup.py) # aws-sam-translator diff --git a/requirements/reproducible-mac.txt b/requirements/reproducible-mac.txt index 294907ed47..7e7740fe0c 100644 --- a/requirements/reproducible-mac.txt +++ b/requirements/reproducible-mac.txt @@ -47,16 +47,16 @@ binaryornot==0.4.4 \ --hash=sha256:359501dfc9d40632edc9fac890e19542db1a287bbcfa58175b66658392018061 \ --hash=sha256:b8b71173c917bddcd2c16070412e369c3ed7f0528926f70cac18a6c97fd563e4 # via cookiecutter -boto3==1.26.134 \ - --hash=sha256:2da4a4caa789312ae73d29be9d3e79ce3328e3aaf7e9de0da6f243455ad3aae6 \ - --hash=sha256:a49b47621c71adfa952127222809ae50867ae4fd249bb932eb1a98519baefa40 +boto3==1.28.2 \ + --hash=sha256:0d53fe604dc30edded21906bc56b30a7684f0715f4f6897307d53f8184997368 \ + --hash=sha256:9933e40dc9ac72deac45cecce2df020e3bf8d0d537538d2b361c17d1cee807cc # via # aws-sam-cli (setup.py) # aws-sam-translator # serverlessrepo -botocore==1.29.135 \ - --hash=sha256:06502a4473924ef60ac0de908385a5afab9caee6c5b49cf6a330fab0d76ddf5f \ - --hash=sha256:0c61d4e5e04fe5329fa65da6b31492ef9d0d5174d72fc2af69de2ed0f87804ca +botocore==1.31.2 \ + --hash=sha256:67a475bec9e52d495a358b34e219ef7f62907e83b87e5bc712528f998bd46dab \ + --hash=sha256:d368ac0b58e2b9025b9c397e4a4f86d71788913ee619263506885a866a4f6811 # via # boto3 # s3transfer @@ -130,9 +130,9 @@ cffi==1.15.1 \ --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 # via cryptography -cfn-lint==0.77.9 \ - --hash=sha256:7c1e631b723b521234d92d4081934291b256dba28d723ddb7ff105215fe40020 \ - --hash=sha256:f95b503f7465ee1f2f89ddf32289ea03a517f08c366bb8e6a5d6773a11e5a1aa +cfn-lint==0.78.1 \ + 
--hash=sha256:2dacb19d5f70c0d49f466302507707cfa4914f65b8fc9310ae3771a273cec044 \ + --hash=sha256:46118362b2e13b79ba3ae6b3c28b7df5fcd437c06f5bcc3384d13a2defdb7d06 # via aws-sam-cli (setup.py) chardet==5.1.0 \ --hash=sha256:0d62712b956bc154f85fb0a266e2a3c5913c2967e00348701b32411d6def31e5 \ @@ -502,9 +502,9 @@ python-slugify==8.0.1 \ --hash=sha256:70ca6ea68fe63ecc8fa4fcf00ae651fc8a5d02d93dcd12ae6d4fc7ca46c4d395 \ --hash=sha256:ce0d46ddb668b3be82f4ed5e503dbc33dd815d83e2eb6824211310d3fb172a27 # via cookiecutter -pytz==2023.2 \ - --hash=sha256:8a8baaf1e237175b02f5c751eea67168043a749c843989e2b3015aa1ad9db68b \ - --hash=sha256:a27dcf612c05d2ebde626f7d506555f10dfc815b3eddccfaadfc7d99b11c9a07 +pytz==2023.3 \ + --hash=sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588 \ + --hash=sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb # via dateparser pyyaml==5.4.1 \ --hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \ @@ -718,9 +718,9 @@ tomlkit==0.11.8 \ --hash=sha256:8c726c4c202bdb148667835f68d68780b9a003a9ec34167b6c673b38eff2a171 \ --hash=sha256:9330fc7faa1db67b541b28e62018c17d20be733177d290a13b24c62d1614e0c3 # via aws-sam-cli (setup.py) -typing-extensions==4.4.0 \ - --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ - --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e +typing-extensions==4.7.1 \ + --hash=sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36 \ + --hash=sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2 # via # aws-sam-cli (setup.py) # aws-sam-translator From 6a412126dc7c8a7ec268034ea8d3e76de936733f Mon Sep 17 00:00:00 2001 From: Leonardo Gama Date: Fri, 14 Jul 2023 08:35:16 -0700 Subject: [PATCH 29/32] Add field for JSON arrays --- schema/samcli.json | 10 ++++++++-- schema/schema.py | 6 +++++- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/schema/samcli.json b/schema/samcli.json index ad8289fb37..444a5b2634 100644 --- a/schema/samcli.json +++ b/schema/samcli.json @@ -264,7 +264,10 @@ "notification_arns": { "title": "notification_arns", "type": "array", - "description": "ARNs of SNS topics that AWS Cloudformation associates with the stack." 
+ "description": "ARNs of SNS topics that AWS Cloudformation associates with the stack.", + "items": { + "type": "string" + } }, "tags": { "title": "tags", @@ -289,7 +292,10 @@ "capabilities": { "title": "capabilities", "type": "array", - "description": "List of capabilities that one must specify before AWS Cloudformation can create certain stacks.\n\nAccepted Values: CAPABILITY_IAM, CAPABILITY_NAMED_IAM, CAPABILITY_RESOURCE_POLICY, CAPABILITY_AUTO_EXPAND.\n\nLearn more at: https://docs.aws.amazon.com/serverlessrepo/latest/devguide/acknowledging-application-capabilities.html" + "description": "List of capabilities that one must specify before AWS Cloudformation can create certain stacks.\n\nAccepted Values: CAPABILITY_IAM, CAPABILITY_NAMED_IAM, CAPABILITY_RESOURCE_POLICY, CAPABILITY_AUTO_EXPAND.\n\nLearn more at: https://docs.aws.amazon.com/serverlessrepo/latest/devguide/acknowledging-application-capabilities.html", + "items": { + "type": "string" + } }, "profile": { "title": "profile", diff --git a/schema/schema.py b/schema/schema.py index 9741f21db6..891a14a990 100644 --- a/schema/schema.py +++ b/schema/schema.py @@ -32,6 +32,7 @@ class SamCliParameterSchema: type: str description: str = "" default: Optional[Any] = None + items: Optional[str] = None choices: Optional[Any] = None def to_schema(self) -> Dict[str, Any]: @@ -40,6 +41,8 @@ def to_schema(self) -> Dict[str, Any]: param.update({"title": self.name, "type": self.type, "description": self.description}) if self.default: param.update({"default": self.default}) + if self.items: + param.update({"items": {"type": self.items}}) if self.choices: param.update({"enum": self.choices}) return param @@ -136,7 +139,8 @@ def format_param(param: click.core.Option) -> SamCliParameterSchema: formatted_param_type = param_type or "string" formatted_param: SamCliParameterSchema = SamCliParameterSchema( - param.name or "", formatted_param_type, clean_text(param.help or "") + param.name or "", formatted_param_type, clean_text(param.help or ""), + items="string" if formatted_param_type == "array" else None ) if param.default: From b49018d31a13fef87692d31c61c5db08a32e98e0 Mon Sep 17 00:00:00 2001 From: Leonardo Gama Date: Fri, 14 Jul 2023 08:40:26 -0700 Subject: [PATCH 30/32] Remove config_env and config_file params --- schema/samcli.json | 14 +------------- schema/schema.py | 16 +++++++++++++--- 2 files changed, 14 insertions(+), 16 deletions(-) diff --git a/schema/samcli.json b/schema/samcli.json index 444a5b2634..74ac8d6eca 100644 --- a/schema/samcli.json +++ b/schema/samcli.json @@ -143,21 +143,9 @@ "properties": { "parameters": { "title": "Parameters for the deploy command", - "description": "Available parameters for the deploy command:\n* config_env:\nEnvironment name specifying default parameter values in the configuration file.\n* config_file:\nConfiguration file containing default parameter values.\n* guided:\nSpecify this flag to allow SAM CLI to guide you through the deployment using guided prompts.\n* template_file:\nAWS SAM template which references built artifacts for resources in the template. (if applicable)\n* no_execute_changeset:\nIndicates whether to execute the change set. Specify this flag to view stack changes before executing the change set.\n* fail_on_empty_changeset:\nSpecify whether AWS SAM CLI should return a non-zero exit code if there are no changes to be made to the stack. 
Defaults to a non-zero exit code.\n* confirm_changeset:\nPrompt to confirm if the computed changeset is to be deployed by SAM CLI.\n* disable_rollback:\nPreserves the state of previously provisioned resources when an operation fails.\n* on_failure:\nProvide an action to determine what will happen when a stack fails to create. Three actions are available:\n\n- ROLLBACK: This will rollback a stack to a previous known good state.\n\n- DELETE: The stack will rollback to a previous state if one exists, otherwise the stack will be deleted.\n\n- DO_NOTHING: The stack will not rollback or delete, this is the same as disabling rollback.\n\nDefault behaviour is ROLLBACK.\n\n\n\nThis option is mutually exclusive with --disable-rollback/--no-disable-rollback. You can provide\n--on-failure or --disable-rollback/--no-disable-rollback but not both at the same time.\n* stack_name:\nName of the AWS CloudFormation stack.\n* s3_bucket:\nAWS S3 bucket where artifacts referenced in the template are uploaded.\n* image_repository:\nAWS ECR repository URI where artifacts referenced in the template are uploaded.\n* image_repositories:\nMapping of Function Logical ID to AWS ECR Repository URI.\n\nExample: Function_Logical_ID=ECR_Repo_Uri\nThis option can be specified multiple times.\n* force_upload:\nIndicates whether to override existing files in the S3 bucket. Specify this flag to upload artifacts even if they match existing artifacts in the S3 bucket.\n* s3_prefix:\nPrefix name that is added to the artifact's name when it is uploaded to the AWS S3 bucket.\n* kms_key_id:\nThe ID of an AWS KMS key that is used to encrypt artifacts that are at rest in the AWS S3 bucket.\n* role_arn:\nARN of an IAM role that AWS Cloudformation assumes when executing a deployment change set.\n* use_json:\nIndicates whether to use JSON as the format for the output AWS CloudFormation template. YAML is used by default.\n* resolve_s3:\nAutomatically resolve AWS S3 bucket for non-guided deployments. Enabling this option will also create a managed default AWS S3 bucket for you. If one does not provide a --s3-bucket value, the managed bucket will be used. Do not use --guided with this option.\n* resolve_image_repos:\nAutomatically create and delete ECR repositories for image-based functions in non-guided deployments. A companion stack containing ECR repos for each function will be deployed along with the template stack. 
Automatically created image repositories will be deleted if the corresponding functions are removed.\n* metadata:\nMap of metadata to attach to ALL the artifacts that are referenced in the template.\n* notification_arns:\nARNs of SNS topics that AWS Cloudformation associates with the stack.\n* tags:\nList of tags to associate with the stack.\n* parameter_overrides:\nString that contains AWS CloudFormation parameter overrides encoded as key=value pairs.\n* signing_profiles:\nA string that contains Code Sign configuration parameters as FunctionOrLayerNameToSign=SigningProfileName:SigningProfileOwner Since signing profile owner is optional, it could also be written as FunctionOrLayerNameToSign=SigningProfileName\n* no_progressbar:\nDoes not showcase a progress bar when uploading artifacts to S3 and pushing docker images to ECR\n* capabilities:\nList of capabilities that one must specify before AWS Cloudformation can create certain stacks.\n\nAccepted Values: CAPABILITY_IAM, CAPABILITY_NAMED_IAM, CAPABILITY_RESOURCE_POLICY, CAPABILITY_AUTO_EXPAND.\n\nLearn more at: https://docs.aws.amazon.com/serverlessrepo/latest/devguide/acknowledging-application-capabilities.html\n* profile:\nSelect a specific profile from your credential file to get AWS credentials.\n* region:\nSet the AWS Region of the service. (e.g. us-east-1)\n* beta_features:\nEnable/Disable beta features.\n* debug:\nTurn on debug logging to print debug message generated by AWS SAM CLI and display timestamps.", + "description": "Available parameters for the deploy command:\n* guided:\nSpecify this flag to allow SAM CLI to guide you through the deployment using guided prompts.\n* template_file:\nAWS SAM template which references built artifacts for resources in the template. (if applicable)\n* no_execute_changeset:\nIndicates whether to execute the change set. Specify this flag to view stack changes before executing the change set.\n* fail_on_empty_changeset:\nSpecify whether AWS SAM CLI should return a non-zero exit code if there are no changes to be made to the stack. Defaults to a non-zero exit code.\n* confirm_changeset:\nPrompt to confirm if the computed changeset is to be deployed by SAM CLI.\n* disable_rollback:\nPreserves the state of previously provisioned resources when an operation fails.\n* on_failure:\nProvide an action to determine what will happen when a stack fails to create. Three actions are available:\n\n- ROLLBACK: This will rollback a stack to a previous known good state.\n\n- DELETE: The stack will rollback to a previous state if one exists, otherwise the stack will be deleted.\n\n- DO_NOTHING: The stack will not rollback or delete, this is the same as disabling rollback.\n\nDefault behaviour is ROLLBACK.\n\n\n\nThis option is mutually exclusive with --disable-rollback/--no-disable-rollback. You can provide\n--on-failure or --disable-rollback/--no-disable-rollback but not both at the same time.\n* stack_name:\nName of the AWS CloudFormation stack.\n* s3_bucket:\nAWS S3 bucket where artifacts referenced in the template are uploaded.\n* image_repository:\nAWS ECR repository URI where artifacts referenced in the template are uploaded.\n* image_repositories:\nMapping of Function Logical ID to AWS ECR Repository URI.\n\nExample: Function_Logical_ID=ECR_Repo_Uri\nThis option can be specified multiple times.\n* force_upload:\nIndicates whether to override existing files in the S3 bucket. 
Specify this flag to upload artifacts even if they match existing artifacts in the S3 bucket.\n* s3_prefix:\nPrefix name that is added to the artifact's name when it is uploaded to the AWS S3 bucket.\n* kms_key_id:\nThe ID of an AWS KMS key that is used to encrypt artifacts that are at rest in the AWS S3 bucket.\n* role_arn:\nARN of an IAM role that AWS Cloudformation assumes when executing a deployment change set.\n* use_json:\nIndicates whether to use JSON as the format for the output AWS CloudFormation template. YAML is used by default.\n* resolve_s3:\nAutomatically resolve AWS S3 bucket for non-guided deployments. Enabling this option will also create a managed default AWS S3 bucket for you. If one does not provide a --s3-bucket value, the managed bucket will be used. Do not use --guided with this option.\n* resolve_image_repos:\nAutomatically create and delete ECR repositories for image-based functions in non-guided deployments. A companion stack containing ECR repos for each function will be deployed along with the template stack. Automatically created image repositories will be deleted if the corresponding functions are removed.\n* metadata:\nMap of metadata to attach to ALL the artifacts that are referenced in the template.\n* notification_arns:\nARNs of SNS topics that AWS Cloudformation associates with the stack.\n* tags:\nList of tags to associate with the stack.\n* parameter_overrides:\nString that contains AWS CloudFormation parameter overrides encoded as key=value pairs.\n* signing_profiles:\nA string that contains Code Sign configuration parameters as FunctionOrLayerNameToSign=SigningProfileName:SigningProfileOwner Since signing profile owner is optional, it could also be written as FunctionOrLayerNameToSign=SigningProfileName\n* no_progressbar:\nDoes not showcase a progress bar when uploading artifacts to S3 and pushing docker images to ECR\n* capabilities:\nList of capabilities that one must specify before AWS Cloudformation can create certain stacks.\n\nAccepted Values: CAPABILITY_IAM, CAPABILITY_NAMED_IAM, CAPABILITY_RESOURCE_POLICY, CAPABILITY_AUTO_EXPAND.\n\nLearn more at: https://docs.aws.amazon.com/serverlessrepo/latest/devguide/acknowledging-application-capabilities.html\n* profile:\nSelect a specific profile from your credential file to get AWS credentials.\n* region:\nSet the AWS Region of the service. (e.g. 
us-east-1)\n* beta_features:\nEnable/Disable beta features.\n* debug:\nTurn on debug logging to print debug message generated by AWS SAM CLI and display timestamps.", "type": "object", "properties": { - "config_env": { - "title": "config_env", - "type": "string", - "description": "Environment name specifying default parameter values in the configuration file.", - "default": "default" - }, - "config_file": { - "title": "config_file", - "type": "string", - "description": "Configuration file containing default parameter values.", - "default": "samconfig.toml" - }, "guided": { "title": "guided", "type": "boolean", diff --git a/schema/schema.py b/schema/schema.py index 891a14a990..89151f599e 100644 --- a/schema/schema.py +++ b/schema/schema.py @@ -139,8 +139,10 @@ def format_param(param: click.core.Option) -> SamCliParameterSchema: formatted_param_type = param_type or "string" formatted_param: SamCliParameterSchema = SamCliParameterSchema( - param.name or "", formatted_param_type, clean_text(param.help or ""), - items="string" if formatted_param_type == "array" else None + param.name or "", + formatted_param_type, + clean_text(param.help or ""), + items="string" if formatted_param_type == "array" else None, ) if param.default: @@ -154,7 +156,15 @@ def format_param(param: click.core.Option) -> SamCliParameterSchema: def get_params_from_command(cli) -> List[SamCliParameterSchema]: """Given a CLI object, return a list of all parameters in that CLI, formatted as SamCliParameterSchema objects.""" - return [format_param(param) for param in cli.params if param.name and isinstance(param, click.core.Option)] + params_to_exclude = [ + "config_env", # shouldn't allow different environment from where the config is being read from + "config_file", # shouldn't allow reading another file within current file + ] + return [ + format_param(param) + for param in cli.params + if param.name and isinstance(param, click.core.Option) and param.name not in params_to_exclude + ] def retrieve_command_structure(package_name: str) -> List[SamCliCommandSchema]: From e42c29b83ecf57132d91116a501c4f83eac41bc6 Mon Sep 17 00:00:00 2001 From: Leonardo Gama Date: Fri, 14 Jul 2023 10:32:50 -0700 Subject: [PATCH 31/32] Linting and formatting --- schema/schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schema/schema.py b/schema/schema.py index 89151f599e..02f909cd75 100644 --- a/schema/schema.py +++ b/schema/schema.py @@ -37,7 +37,7 @@ class SamCliParameterSchema: def to_schema(self) -> Dict[str, Any]: """Return the JSON schema representation of the SAM CLI parameter.""" - param = {} + param: Dict[str, Any] = {} param.update({"title": self.name, "type": self.type, "description": self.description}) if self.default: param.update({"default": self.default}) From 412f2839a9270e22b7e1e074c1a635218f9aa169 Mon Sep 17 00:00:00 2001 From: Leonardo Gama Date: Fri, 14 Jul 2023 11:51:17 -0700 Subject: [PATCH 32/32] Fix failing tests --- samcli/lib/config/samconfig.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samcli/lib/config/samconfig.py b/samcli/lib/config/samconfig.py index 77a76ea028..2cd0d41199 100644 --- a/samcli/lib/config/samconfig.py +++ b/samcli/lib/config/samconfig.py @@ -87,7 +87,7 @@ def get_all(self, cmd_names, section, env=DEFAULT_ENV): self.document = self._read() config_content = self.document.get(env, {}) - params = config_content.get(self._to_key(cmd_names), {}).get(section, {}) + params = config_content.get(self.to_key(cmd_names), {}).get(section, {}) if DEFAULT_GLOBAL_CMDNAME 
in config_content: global_params = config_content.get(DEFAULT_GLOBAL_CMDNAME, {}).get(section, {}) global_params.update(params.copy())