From 157a7bc379f7f91849f850f97fc96e843359840c Mon Sep 17 00:00:00 2001 From: Michael Brewer Date: Tue, 12 Jan 2021 08:16:14 -0800 Subject: [PATCH 1/8] chore: general simplifications and cleanup (#255) * chore: General simplications and cleanup * Use simplified logic checks * Use list and dict comprehension * Return value directly * Does the same and removes that if/else logic Co-authored-by: Joris Conijn * Does the same and removes that if/else logic Co-authored-by: Joris Conijn * Removed unneeded blank line. * fix: Fix indents from accepting github changes * fix: Temp fix to correct unit tests * chore: Make easier to read Co-authored-by: Joris Conijn --- aws_lambda_powertools/logging/filters.py | 2 +- aws_lambda_powertools/logging/formatter.py | 4 +--- aws_lambda_powertools/logging/logger.py | 9 ++++----- aws_lambda_powertools/metrics/base.py | 5 +---- aws_lambda_powertools/tracing/tracer.py | 16 +++++++--------- .../utilities/parameters/dynamodb.py | 6 +----- .../utilities/parser/envelopes/cloudwatch.py | 7 +++---- .../utilities/parser/envelopes/dynamodb.py | 16 +++++++--------- .../utilities/parser/envelopes/kinesis.py | 9 ++++----- .../utilities/parser/envelopes/sns.py | 5 +---- .../utilities/parser/envelopes/sqs.py | 5 +---- 11 files changed, 31 insertions(+), 53 deletions(-) diff --git a/aws_lambda_powertools/logging/filters.py b/aws_lambda_powertools/logging/filters.py index ce0770a222d..5bb77f04f78 100644 --- a/aws_lambda_powertools/logging/filters.py +++ b/aws_lambda_powertools/logging/filters.py @@ -13,4 +13,4 @@ def filter(self, record): # noqa: A003 created by loggers who don't have a handler. 
""" logger = record.name - return False if self.logger in logger else True + return self.logger not in logger diff --git a/aws_lambda_powertools/logging/formatter.py b/aws_lambda_powertools/logging/formatter.py index 647abf33a8a..ded813ac652 100644 --- a/aws_lambda_powertools/logging/formatter.py +++ b/aws_lambda_powertools/logging/formatter.py @@ -49,9 +49,7 @@ def _build_root_keys(**kwargs): @staticmethod def _get_latest_trace_id(): xray_trace_id = os.getenv("_X_AMZN_TRACE_ID") - trace_id = xray_trace_id.split(";")[0].replace("Root=", "") if xray_trace_id else None - - return trace_id + return xray_trace_id.split(";")[0].replace("Root=", "") if xray_trace_id else None def update_formatter(self, **kwargs): self.format_dict.update(kwargs) diff --git a/aws_lambda_powertools/logging/logger.py b/aws_lambda_powertools/logging/logger.py index 74663ec7b4a..e03f542e6c6 100644 --- a/aws_lambda_powertools/logging/logger.py +++ b/aws_lambda_powertools/logging/logger.py @@ -291,9 +291,10 @@ def _get_log_level(level: Union[str, int]) -> Union[str, int]: return level log_level: str = level or os.getenv("LOG_LEVEL") - log_level = log_level.upper() if log_level is not None else logging.INFO + if log_level is None: + return logging.INFO - return log_level + return log_level.upper() @staticmethod def _get_caller_filename(): @@ -303,9 +304,7 @@ def _get_caller_filename(): # Before previous frame => Caller frame = inspect.currentframe() caller_frame = frame.f_back.f_back.f_back - filename = caller_frame.f_globals["__name__"] - - return filename + return caller_frame.f_globals["__name__"] def set_package_logger( diff --git a/aws_lambda_powertools/metrics/base.py b/aws_lambda_powertools/metrics/base.py index 5b04e8a3f67..b54b72bf58a 100644 --- a/aws_lambda_powertools/metrics/base.py +++ b/aws_lambda_powertools/metrics/base.py @@ -240,10 +240,7 @@ def add_dimension(self, name: str, value: str): # Cast value to str according to EMF spec # Majority of values are expected to be string 
already, so # checking before casting improves performance in most cases - if isinstance(value, str): - self.dimension_set[name] = value - else: - self.dimension_set[name] = str(value) + self.dimension_set[name] = value if isinstance(value, str) else str(value) def add_metadata(self, key: str, value: Any): """Adds high cardinal metadata for metrics object diff --git a/aws_lambda_powertools/tracing/tracer.py b/aws_lambda_powertools/tracing/tracer.py index 0aab57ee39a..079f662a9ad 100644 --- a/aws_lambda_powertools/tracing/tracer.py +++ b/aws_lambda_powertools/tracing/tracer.py @@ -454,24 +454,22 @@ async def async_tasks(): method_name = f"{method.__name__}" if inspect.iscoroutinefunction(method): - decorate = self._decorate_async_function( + return self._decorate_async_function( method=method, capture_response=capture_response, method_name=method_name ) elif inspect.isgeneratorfunction(method): - decorate = self._decorate_generator_function( + return self._decorate_generator_function( method=method, capture_response=capture_response, method_name=method_name ) elif hasattr(method, "__wrapped__") and inspect.isgeneratorfunction(method.__wrapped__): - decorate = self._decorate_generator_function_with_context_manager( + return self._decorate_generator_function_with_context_manager( method=method, capture_response=capture_response, method_name=method_name ) else: - decorate = self._decorate_sync_function( + return self._decorate_sync_function( method=method, capture_response=capture_response, method_name=method_name ) - return decorate - def _decorate_async_function(self, method: Callable = None, capture_response: bool = True, method_name: str = None): @functools.wraps(method) async def decorate(*args, **kwargs): @@ -650,9 +648,9 @@ def __build_config( self._config["provider"] = provider if provider is not None else self._config["provider"] self._config["auto_patch"] = auto_patch if auto_patch is not None else self._config["auto_patch"] - self._config["service"] = 
is_service if is_service else self._config["service"] - self._config["disabled"] = is_disabled if is_disabled else self._config["disabled"] - self._config["patch_modules"] = patch_modules if patch_modules else self._config["patch_modules"] + self._config["service"] = is_service or self._config["service"] + self._config["disabled"] = is_disabled or self._config["disabled"] + self._config["patch_modules"] = patch_modules or self._config["patch_modules"] @classmethod def _reset_config(cls): diff --git a/aws_lambda_powertools/utilities/parameters/dynamodb.py b/aws_lambda_powertools/utilities/parameters/dynamodb.py index 4132697f0b9..dcb447b6060 100644 --- a/aws_lambda_powertools/utilities/parameters/dynamodb.py +++ b/aws_lambda_powertools/utilities/parameters/dynamodb.py @@ -206,8 +206,4 @@ def _get_multiple(self, path: str, **sdk_options) -> Dict[str, str]: response = self.table.query(**sdk_options) items.extend(response.get("Items", [])) - retval = {} - for item in items: - retval[item[self.sort_attr]] = item[self.value_attr] - - return retval + return {item[self.sort_attr]: item[self.value_attr] for item in items} diff --git a/aws_lambda_powertools/utilities/parser/envelopes/cloudwatch.py b/aws_lambda_powertools/utilities/parser/envelopes/cloudwatch.py index e4ecdd8b5ac..da5dda1ed23 100644 --- a/aws_lambda_powertools/utilities/parser/envelopes/cloudwatch.py +++ b/aws_lambda_powertools/utilities/parser/envelopes/cloudwatch.py @@ -36,7 +36,6 @@ def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: Model) -> Lis logger.debug(f"Parsing incoming data with SNS model {CloudWatchLogsModel}") parsed_envelope = CloudWatchLogsModel.parse_obj(data) logger.debug(f"Parsing CloudWatch records in `body` with {model}") - output = [] - for record in parsed_envelope.awslogs.decoded_data.logEvents: - output.append(self._parse(data=record.message, model=model)) - return output + return [ + self._parse(data=record.message, model=model) for record in 
parsed_envelope.awslogs.decoded_data.logEvents + ] diff --git a/aws_lambda_powertools/utilities/parser/envelopes/dynamodb.py b/aws_lambda_powertools/utilities/parser/envelopes/dynamodb.py index 38d19ffe1c6..3315196af3a 100644 --- a/aws_lambda_powertools/utilities/parser/envelopes/dynamodb.py +++ b/aws_lambda_powertools/utilities/parser/envelopes/dynamodb.py @@ -32,13 +32,11 @@ def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: Model) -> Lis """ logger.debug(f"Parsing incoming data with DynamoDB Stream model {DynamoDBStreamModel}") parsed_envelope = DynamoDBStreamModel.parse_obj(data) - output = [] logger.debug(f"Parsing DynamoDB Stream new and old records with {model}") - for record in parsed_envelope.Records: - output.append( - { - "NewImage": self._parse(data=record.dynamodb.NewImage, model=model), - "OldImage": self._parse(data=record.dynamodb.OldImage, model=model), - } - ) - return output + return [ + { + "NewImage": self._parse(data=record.dynamodb.NewImage, model=model), + "OldImage": self._parse(data=record.dynamodb.OldImage, model=model), + } + for record in parsed_envelope.Records + ] diff --git a/aws_lambda_powertools/utilities/parser/envelopes/kinesis.py b/aws_lambda_powertools/utilities/parser/envelopes/kinesis.py index 97ad7bffec7..b1e47adf46e 100644 --- a/aws_lambda_powertools/utilities/parser/envelopes/kinesis.py +++ b/aws_lambda_powertools/utilities/parser/envelopes/kinesis.py @@ -11,7 +11,7 @@ class KinesisDataStreamEnvelope(BaseEnvelope): """Kinesis Data Stream Envelope to extract array of Records - The record's data parameter is a base64 encoded string which is parsed into a bytes array, + The record's data parameter is a base64 encoded string which is parsed into a bytes array, though it can also be a JSON encoded string. Regardless of its type it'll be parsed into a BaseModel object. 
@@ -36,8 +36,7 @@ def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: Model) -> Lis """ logger.debug(f"Parsing incoming data with Kinesis model {KinesisDataStreamModel}") parsed_envelope: KinesisDataStreamModel = KinesisDataStreamModel.parse_obj(data) - output = [] logger.debug(f"Parsing Kinesis records in `body` with {model}") - for record in parsed_envelope.Records: - output.append(self._parse(data=record.kinesis.data.decode("utf-8"), model=model)) - return output + return [ + self._parse(data=record.kinesis.data.decode("utf-8"), model=model) for record in parsed_envelope.Records + ] diff --git a/aws_lambda_powertools/utilities/parser/envelopes/sns.py b/aws_lambda_powertools/utilities/parser/envelopes/sns.py index f703bb46c63..d4a78199d07 100644 --- a/aws_lambda_powertools/utilities/parser/envelopes/sns.py +++ b/aws_lambda_powertools/utilities/parser/envelopes/sns.py @@ -35,8 +35,5 @@ def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: Model) -> Lis """ logger.debug(f"Parsing incoming data with SNS model {SnsModel}") parsed_envelope = SnsModel.parse_obj(data) - output = [] logger.debug(f"Parsing SNS records in `body` with {model}") - for record in parsed_envelope.Records: - output.append(self._parse(data=record.Sns.Message, model=model)) - return output + return [self._parse(data=record.Sns.Message, model=model) for record in parsed_envelope.Records] diff --git a/aws_lambda_powertools/utilities/parser/envelopes/sqs.py b/aws_lambda_powertools/utilities/parser/envelopes/sqs.py index 3ed479ffa31..acc8f9900dd 100644 --- a/aws_lambda_powertools/utilities/parser/envelopes/sqs.py +++ b/aws_lambda_powertools/utilities/parser/envelopes/sqs.py @@ -35,8 +35,5 @@ def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: Model) -> Lis """ logger.debug(f"Parsing incoming data with SQS model {SqsModel}") parsed_envelope = SqsModel.parse_obj(data) - output = [] logger.debug(f"Parsing SQS records in `body` with {model}") - for record in 
parsed_envelope.Records: - output.append(self._parse(data=record.body, model=model)) - return output + return [self._parse(data=record.body, model=model) for record in parsed_envelope.Records] From 1131541045a1f11ddecaa8d9655fc6993eb59b5d Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Tue, 12 Jan 2021 17:54:30 +0100 Subject: [PATCH 2/8] feat: support extra parameter in Logger messages (#257) * improv: refactor formatter to ease changes * feat: support for extra keys in log messages * docs: add extra param, temp toc fix --- aws_lambda_powertools/logging/formatter.py | 143 ++++++++++++++++----- docs/content/core/logger.mdx | 93 ++++++++++++-- tests/functional/test_logger.py | 19 +++ 3 files changed, 211 insertions(+), 44 deletions(-) diff --git a/aws_lambda_powertools/logging/formatter.py b/aws_lambda_powertools/logging/formatter.py index ded813ac652..063b97ab21c 100644 --- a/aws_lambda_powertools/logging/formatter.py +++ b/aws_lambda_powertools/logging/formatter.py @@ -1,6 +1,31 @@ import json import logging import os +from typing import Dict, Iterable, Optional, Union + +STD_LOGGING_KEYS = ( + "name", + "msg", + "args", + "levelname", + "levelno", + "pathname", + "filename", + "module", + "exc_info", + "exc_text", + "stack_info", + "lineno", + "funcName", + "created", + "msecs", + "relativeCreated", + "thread", + "threadName", + "processName", + "process", + "asctime", +) class JsonFormatter(logging.Formatter): @@ -30,12 +55,12 @@ def __init__(self, **kwargs): # Set the default unserializable function, by default values will be cast as str. 
self.default_json_formatter = kwargs.pop("json_default", str) # Set the insertion order for the log messages - self.format_dict = dict.fromkeys(kwargs.pop("log_record_order", ["level", "location", "message", "timestamp"])) + self.log_format = dict.fromkeys(kwargs.pop("log_record_order", ["level", "location", "message", "timestamp"])) self.reserved_keys = ["timestamp", "level", "location"] # Set the date format used by `asctime` super(JsonFormatter, self).__init__(datefmt=kwargs.pop("datefmt", None)) - self.format_dict.update(self._build_root_keys(**kwargs)) + self.log_format.update(self._build_root_keys(**kwargs)) @staticmethod def _build_root_keys(**kwargs): @@ -52,47 +77,95 @@ def _get_latest_trace_id(): return xray_trace_id.split(";")[0].replace("Root=", "") if xray_trace_id else None def update_formatter(self, **kwargs): - self.format_dict.update(kwargs) + self.log_format.update(kwargs) - def format(self, record): # noqa: A003 - record_dict = record.__dict__.copy() - record_dict["asctime"] = self.formatTime(record, self.datefmt) + @staticmethod + def _extract_log_message(log_record: logging.LogRecord) -> Union[Dict, str, bool, Iterable]: + """Extract message from log record and attempt to JSON decode it + + Parameters + ---------- + log_record : logging.LogRecord + Log record to extract message from + + Returns + ------- + message: Union[Dict, str, bool, Iterable] + Extracted message + """ + if isinstance(log_record.msg, dict): + return log_record.msg - log_dict = {} + message: str = log_record.getMessage() - for key, value in self.format_dict.items(): - if value and key in self.reserved_keys: - # converts default logging expr to its record value - # e.g. '%(asctime)s' to '2020-04-24 09:35:40,698' - log_dict[key] = value % record_dict - else: - log_dict[key] = value + # Attempt to decode non-str messages e.g. 
msg = '{"x": "y"}' + try: + message = json.loads(log_record.msg) + except (json.decoder.JSONDecodeError, TypeError, ValueError): + pass + + return message + + def _extract_log_exception(self, log_record: logging.LogRecord) -> Optional[str]: + """Format traceback information, if available + + Parameters + ---------- + log_record : logging.LogRecord + Log record to extract message from + + Returns + ------- + log_record: Optional[str] + Log record with constant traceback info + """ + if log_record.exc_info: + return self.formatException(log_record.exc_info) + + return None - if isinstance(record_dict["msg"], dict): - log_dict["message"] = record_dict["msg"] - else: - log_dict["message"] = record.getMessage() + def _extract_log_keys(self, log_record: logging.LogRecord) -> Dict: + """Extract and parse custom and reserved log keys - # Attempt to decode the message as JSON, if so, merge it with the - # overall message for clarity. - try: - log_dict["message"] = json.loads(log_dict["message"]) - except (json.decoder.JSONDecodeError, TypeError, ValueError): - pass + Parameters + ---------- + log_record : logging.LogRecord + Log record to extract keys from - if record.exc_info and not record.exc_text: - # Cache the traceback text to avoid converting it multiple times - # (it's constant anyway) - # from logging.Formatter:format - record.exc_text = self.formatException(record.exc_info) + Returns + ------- + formatted_log: Dict + Structured log as dictionary + """ + record_dict = log_record.__dict__.copy() # has extra kwargs we are after + record_dict["asctime"] = self.formatTime(log_record, self.datefmt) - if record.exc_text: - log_dict["exception"] = record.exc_text + formatted_log = {} - # fetch latest X-Ray Trace ID, if any - log_dict.update({"xray_trace_id": self._get_latest_trace_id()}) + # We have to iterate over a default or existing log structure + # then replace any logging expression for reserved keys e.g. 
'%(level)s' to 'INFO' + # and lastly add or replace incoming keys (those added within the constructor or .structure_logs method) + for key, value in self.log_format.items(): + if value and key in self.reserved_keys: + formatted_log[key] = value % record_dict + else: + formatted_log[key] = value + + # pick up extra keys when logging a new message e.g. log.info("my message", extra={"additional_key": "value"} + # these messages will be added to the root of the final structure not within `message` key + for key, value in record_dict.items(): + if key not in STD_LOGGING_KEYS: + formatted_log[key] = value + + return formatted_log + + def format(self, record): # noqa: A003 + formatted_log = self._extract_log_keys(log_record=record) + formatted_log["message"] = self._extract_log_message(log_record=record) + formatted_log["exception"] = self._extract_log_exception(log_record=record) + formatted_log.update({"xray_trace_id": self._get_latest_trace_id()}) # fetch latest Trace ID, if any # Filter out top level key with values that are None - log_dict = {k: v for k, v in log_dict.items() if v is not None} + formatted_log = {k: v for k, v in formatted_log.items() if v is not None} - return json.dumps(log_dict, default=self.default_json_formatter) + return json.dumps(formatted_log, default=self.default_json_formatter) diff --git a/docs/content/core/logger.mdx b/docs/content/core/logger.mdx index baabd9c2faf..e54ea43cd62 100644 --- a/docs/content/core/logger.mdx +++ b/docs/content/core/logger.mdx @@ -150,6 +150,13 @@ def handler(event, context): ## Appending additional keys +You can append additional keys using either mechanism: + +* Persist new keys across all future log messages via `structure_logs` +* Add additional keys on a per log message basis via `extra` parameter + +### structure_logs + You can append your own keys to your existing Logger via `structure_logs` with **append** param. 
```python:title=collect.py @@ -179,11 +186,6 @@ This example will add `order_id` if its value is not empty, and in subsequent in "level": "INFO", "location": "collect.handler:1", "service": "payment", - "lambda_function_name": "test", - "lambda_function_memory_size": 128, - "lambda_function_arn": "arn:aws:lambda:eu-west-1:12345678910:function:test", - "lambda_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72", - "cold_start": true, "sampling_rate": 0.0, "order_id": "order_id_value", // highlight-line "message": "Collecting payment" @@ -191,6 +193,34 @@ This example will add `order_id` if its value is not empty, and in subsequent in ``` +### extra parameter + +Extra parameter is available for all log levels, as implemented in the standard logging library. It accepts any dictionary, and it'll be added as part of the root structure of the logs. + +```python:title=extra_parameter.py +logger = Logger(service="payment") + +fields = { "request_id": "1123" } + +logger.info("Hello", extra=fields) # highlight-line +``` + +
+Excerpt output in CloudWatch Logs + +```json:title=cloudwatch_logs.json +{ + "timestamp": "2021-01-12 14:08:12,357", + "level": "INFO", + "location": "collect.handler:1", + "service": "payment", + "sampling_rate": 0.0, + "request_id": "1123", // highlight-line + "message": "Collecting payment" +} +``` +
+ ## Reusing Logger across your code Logger supports inheritance via `child` parameter. This allows you to create multiple Loggers across your code base, and propagate changes such as new keys to all Loggers. @@ -291,13 +321,13 @@ def handler(event, context): If you're migrating from other Loggers, there are few key points to be aware of: **Service parameter**, **Inheriting Loggers**, **Overriding Log records**, and **Logging exceptions**. -### The service parameter +### service parameter Service is what defines what the function is responsible for, or part of (e.g payment service), and the name of the Logger. For Logger, the `service` is the logging key customers can use to search log operations for one or more functions - For example, **search for all errors, or messages like X, where service is payment**. -### Inheriting Loggers +### inheriting Loggers > Python Logging hierarchy happens via the dot notation: `service`, `service.child`, `service.child_2`. @@ -325,7 +355,7 @@ In this case, Logger will register a Logger named `payment`, and a Logger named This can be fixed by either ensuring both has the `service` value as `payment`, or simply use the environment variable `POWERTOOLS_SERVICE_NAME` to ensure service value will be the same across all Loggers when not explicitly set. -### Overriding Log records +### overriding Log records You might want to continue to use the same date formatting style, or override `location` to display the `package.function_name:line_number` as you previously had. @@ -355,7 +385,7 @@ logger = Logger(stream=stdout, log_record_order=["level","location","message","t Some keys cannot be supressed in the Log records: `sampling_rate` is part of the specification and cannot be supressed; `xray_trace_id` is supressed automatically if X-Ray is not enabled in the Lambda function, and added automatically if it is. 
-### Logging exceptions +### logging exceptions When logging exceptions, Logger will add a new key named `exception`, and will serialize the full traceback as a string. @@ -434,3 +464,48 @@ def handler(event: Dict, context: LambdaContext) -> List: return response.get("Buckets", []) ``` + +**What's the difference between `structure_log` and `extra`?** + +Keys added with `structure_log` will persist across multiple log messages while keys added via `extra` will only be available in a given log message operation. + +**Example - Persisting payment_id not request_id** + +```python +from aws_lambda_powertools import Logger + +logger = Logger(service="payment") +logger.structure_logs(append=True, payment_id="123456789") + +try: + booking_id = book_flight() + logger.info("Flight booked successfully", extra={ "booking_id": booking_id}) +except BookingReservationError: + ... + +logger.info("goodbye") +``` + +Note that `payment_id` remains in both log messages while `booking_id` is only available in the first message. 
+ +```json +{ + "level": "INFO", + "location": ":5", + "message": "Flight booked successfully", + "timestamp": "2021-01-12 14:09:10,859", + "service": "payment", + "sampling_rate": 0.0, + "payment_id": "123456789", + "booking_id": "75edbad0-0857-4fc9-b547-6180e2f7959b" +}, +{ + "level": "INFO", + "location": ":6", + "message": "goodbye", + "timestamp": "2021-01-12 14:09:10,860", + "service": "payment", + "sampling_rate": 0.0, + "payment_id": "123456789" +} +``` diff --git a/tests/functional/test_logger.py b/tests/functional/test_logger.py index dced7da35ba..8e8025a6cb8 100644 --- a/tests/functional/test_logger.py +++ b/tests/functional/test_logger.py @@ -381,3 +381,22 @@ def test_logger_do_not_log_twice_when_root_logger_is_setup(stdout, service_name) # since child's log records propagated to root logger should be rejected logs = list(stdout.getvalue().strip().split("\n")) assert len(logs) == 2 + + +def test_logger_extra_kwargs(stdout, service_name): + # GIVEN Logger is initialized + logger = Logger(service=service_name, stream=stdout) + + # WHEN `request_id` is an extra field in a log message to the existing structured log + fields = {"request_id": "blah"} + + logger.info("with extra fields", extra=fields) + logger.info("without extra fields") + + extra_fields_log, no_extra_fields_log = capture_multiple_logging_statements_output(stdout) + + # THEN first log should have request_id field in the root structure + assert "request_id" in extra_fields_log + + # THEN second log should not have request_id in the root structure + assert "request_id" not in no_extra_fields_log From dfb97afacbe3f7c9d49e54b14f1315a0b0914e08 Mon Sep 17 00:00:00 2001 From: Alex Date: Wed, 13 Jan 2021 15:11:52 +0100 Subject: [PATCH 3/8] docs: add info about extras layer (#260) * docs: add info about extras layer * docs: cleanup * Update docs/content/index.mdx Co-authored-by: Heitor Lessa --- docs/content/index.mdx | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git 
a/docs/content/index.mdx b/docs/content/index.mdx index 6af7d601862..e8ff25910dc 100644 --- a/docs/content/index.mdx +++ b/docs/content/index.mdx @@ -28,10 +28,14 @@ sam init --location https://github.com/aws-samples/cookiecutter-aws-sam-python ### Lambda Layer Powertools is also available as a Lambda Layer. It is distributed via the [AWS Serverless Application Repository (SAR)](https://docs.aws.amazon.com/serverlessrepo/latest/devguide/what-is-serverlessrepo.html). +We have two layers available, one with core dependencies `aws-lambda-powertools-python-layer` and one with extras `aws-lambda-powertools-python-layer-extras` such as `pydantic` which is required for the parser. + +> **NOTE**: Extras layer support does not support Python 3.6 runtime. This layer is also includes all extra dependencies and is 22.4MB zipped, ~155MB unzipped big. App | ARN ----------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------- [aws-lambda-powertools-python-layer](https://serverlessrepo.aws.amazon.com/applications/eu-west-1/057560766410/aws-lambda-powertools-python-layer) | arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer +[aws-lambda-powertools-python-layer-extras](https://serverlessrepo.aws.amazon.com/applications/eu-west-1/057560766410/aws-lambda-powertools-python-layer-extras) | arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer-extras If using SAM, you can include this SAR App as part of your shared Layers stack, and lock to a specific semantic version. Once deployed, it'll be available across the account this is deployed to. 
@@ -41,9 +45,10 @@ If using SAM, you can include this SAR App as part of your shared Layers stack, Properties: Location: ApplicationId: arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer - SemanticVersion: 1.3.1 # change to latest semantic version available in SAR + SemanticVersion: 1.9.0 # change to latest semantic version available in SAR ``` + This will add a nested app stack with an output parameter `LayerVersionArn`, that you can reference inside your Lambda function definition: ```yaml @@ -51,6 +56,7 @@ This will add a nested app stack with an output parameter `LayerVersionArn`, tha - !GetAtt AwsLambdaPowertoolsPythonLayer.Outputs.LayerVersionArn ``` + Here is the list of IAM permissions that you need to add to your deployment IAM role to use the layer: ```yaml From 8ad7e047ca7a11d92a1044926169ca696a54e10d Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Wed, 13 Jan 2021 16:43:26 +0100 Subject: [PATCH 4/8] improv: override Tracer auto-capture response/exception via env vars (#259) * improv: add capture_response/error env vars * improv: add tests for capture_response/error env vars * docs: update env vars * improv: address Nicolas feedback; improve docs * docs: fix wording on disable exception example --- aws_lambda_powertools/shared/__init__.py | 0 aws_lambda_powertools/shared/constants.py | 4 + aws_lambda_powertools/shared/functions.py | 5 + aws_lambda_powertools/tracing/tracer.py | 116 +++++++++++++++++----- docs/content/core/tracer.mdx | 25 ++++- docs/content/index.mdx | 24 +++-- tests/functional/test_shared_functions.py | 9 ++ tests/unit/test_tracing.py | 8 +- 8 files changed, 151 insertions(+), 40 deletions(-) create mode 100644 aws_lambda_powertools/shared/__init__.py create mode 100644 aws_lambda_powertools/shared/constants.py create mode 100644 aws_lambda_powertools/shared/functions.py create mode 100644 tests/functional/test_shared_functions.py diff --git a/aws_lambda_powertools/shared/__init__.py 
b/aws_lambda_powertools/shared/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/aws_lambda_powertools/shared/constants.py b/aws_lambda_powertools/shared/constants.py new file mode 100644 index 00000000000..a3863c33286 --- /dev/null +++ b/aws_lambda_powertools/shared/constants.py @@ -0,0 +1,4 @@ +import os + +TRACER_CAPTURE_RESPONSE_ENV: str = os.getenv("POWERTOOLS_TRACER_CAPTURE_RESPONSE", "true") +TRACER_CAPTURE_ERROR_ENV: str = os.getenv("POWERTOOLS_TRACER_CAPTURE_ERROR", "true") diff --git a/aws_lambda_powertools/shared/functions.py b/aws_lambda_powertools/shared/functions.py new file mode 100644 index 00000000000..2a3af7db5f3 --- /dev/null +++ b/aws_lambda_powertools/shared/functions.py @@ -0,0 +1,5 @@ +from distutils.util import strtobool + + +def resolve_env_var_choice(env: str, choice: bool = None) -> bool: + return choice if choice is not None else strtobool(env) diff --git a/aws_lambda_powertools/tracing/tracer.py b/aws_lambda_powertools/tracing/tracer.py index 079f662a9ad..2e083e5ebab 100644 --- a/aws_lambda_powertools/tracing/tracer.py +++ b/aws_lambda_powertools/tracing/tracer.py @@ -5,11 +5,14 @@ import logging import os from distutils.util import strtobool -from typing import Any, Callable, Dict, List, Tuple +from typing import Any, Callable, Dict, List, Optional, Tuple import aws_xray_sdk import aws_xray_sdk.core +from aws_lambda_powertools.shared.constants import TRACER_CAPTURE_ERROR_ENV, TRACER_CAPTURE_RESPONSE_ENV +from aws_lambda_powertools.shared.functions import resolve_env_var_choice + is_cold_start = True logger = logging.getLogger(__name__) @@ -34,6 +37,10 @@ class Tracer: disable tracer (e.g. `"true", "True", "TRUE"`) POWERTOOLS_SERVICE_NAME : str service name + POWERTOOLS_TRACER_CAPTURE_RESPONSE : str + disable auto-capture response as metadata (e.g. `"true", "True", "TRUE"`) + POWERTOOLS_TRACER_CAPTURE_ERROR : str + disable auto-capture error as metadata (e.g. 
`"true", "True", "TRUE"`) Parameters ---------- @@ -226,7 +233,12 @@ def patch(self, modules: Tuple[str] = None): else: aws_xray_sdk.core.patch(modules) - def capture_lambda_handler(self, lambda_handler: Callable[[Dict, Any], Any] = None, capture_response: bool = True): + def capture_lambda_handler( + self, + lambda_handler: Callable[[Dict, Any], Any] = None, + capture_response: Optional[bool] = None, + capture_error: Optional[bool] = None, + ): """Decorator to create subsegment for lambda handlers As Lambda follows (event, context) signature we can remove some of the boilerplate @@ -237,7 +249,9 @@ def capture_lambda_handler(self, lambda_handler: Callable[[Dict, Any], Any] = No lambda_handler : Callable Method to annotate on capture_response : bool, optional - Instructs tracer to not include handler's response as metadata, by default True + Instructs tracer to not include handler's response as metadata + capture_error : bool, optional + Instructs tracer to not include handler's error as metadata, by default True Example ------- @@ -264,10 +278,15 @@ def handler(event, context): # Return a partial function with args filled if lambda_handler is None: logger.debug("Decorator called with parameters") - return functools.partial(self.capture_lambda_handler, capture_response=capture_response) + return functools.partial( + self.capture_lambda_handler, capture_response=capture_response, capture_error=capture_error + ) lambda_handler_name = lambda_handler.__name__ + capture_response = resolve_env_var_choice(env=TRACER_CAPTURE_RESPONSE_ENV, choice=capture_response) + capture_error = resolve_env_var_choice(env=TRACER_CAPTURE_ERROR_ENV, choice=capture_error) + @functools.wraps(lambda_handler) def decorate(event, context): with self.provider.in_subsegment(name=f"## {lambda_handler_name}") as subsegment: @@ -290,7 +309,7 @@ def decorate(event, context): except Exception as err: logger.exception(f"Exception received from {lambda_handler_name}") 
self._add_full_exception_as_metadata( - method_name=lambda_handler_name, error=err, subsegment=subsegment + method_name=lambda_handler_name, error=err, subsegment=subsegment, capture_error=capture_error ) raise @@ -298,7 +317,9 @@ def decorate(event, context): return decorate - def capture_method(self, method: Callable = None, capture_response: bool = True): + def capture_method( + self, method: Callable = None, capture_response: Optional[bool] = None, capture_error: Optional[bool] = None + ): """Decorator to create subsegment for arbitrary functions It also captures both response and exceptions as metadata @@ -317,7 +338,9 @@ def capture_method(self, method: Callable = None, capture_response: bool = True) method : Callable Method to annotate on capture_response : bool, optional - Instructs tracer to not include method's response as metadata, by default True + Instructs tracer to not include method's response as metadata + capture_error : bool, optional + Instructs tracer to not include handler's error as metadata, by default True Example ------- @@ -449,28 +472,39 @@ async def async_tasks(): # Return a partial function with args filled if method is None: logger.debug("Decorator called with parameters") - return functools.partial(self.capture_method, capture_response=capture_response) + return functools.partial( + self.capture_method, capture_response=capture_response, capture_error=capture_error + ) method_name = f"{method.__name__}" + capture_response = resolve_env_var_choice(env=TRACER_CAPTURE_RESPONSE_ENV, choice=capture_response) + capture_error = resolve_env_var_choice(env=TRACER_CAPTURE_ERROR_ENV, choice=capture_error) + if inspect.iscoroutinefunction(method): return self._decorate_async_function( - method=method, capture_response=capture_response, method_name=method_name + method=method, capture_response=capture_response, capture_error=capture_error, method_name=method_name ) elif inspect.isgeneratorfunction(method): return 
self._decorate_generator_function( - method=method, capture_response=capture_response, method_name=method_name + method=method, capture_response=capture_response, capture_error=capture_error, method_name=method_name ) elif hasattr(method, "__wrapped__") and inspect.isgeneratorfunction(method.__wrapped__): return self._decorate_generator_function_with_context_manager( - method=method, capture_response=capture_response, method_name=method_name + method=method, capture_response=capture_response, capture_error=capture_error, method_name=method_name ) else: return self._decorate_sync_function( - method=method, capture_response=capture_response, method_name=method_name + method=method, capture_response=capture_response, capture_error=capture_error, method_name=method_name ) - def _decorate_async_function(self, method: Callable = None, capture_response: bool = True, method_name: str = None): + def _decorate_async_function( + self, + method: Callable = None, + capture_response: Optional[bool] = None, + capture_error: Optional[bool] = None, + method_name: str = None, + ): @functools.wraps(method) async def decorate(*args, **kwargs): async with self.provider.in_subsegment_async(name=f"## {method_name}") as subsegment: @@ -478,14 +512,13 @@ async def decorate(*args, **kwargs): logger.debug(f"Calling method: {method_name}") response = await method(*args, **kwargs) self._add_response_as_metadata( - method_name=method_name, - data=response, - subsegment=subsegment, - capture_response=capture_response, + method_name=method_name, data=response, subsegment=subsegment, capture_response=capture_response ) except Exception as err: logger.exception(f"Exception received from '{method_name}' method") - self._add_full_exception_as_metadata(method_name=method_name, error=err, subsegment=subsegment) + self._add_full_exception_as_metadata( + method_name=method_name, error=err, subsegment=subsegment, capture_error=capture_error + ) raise return response @@ -493,7 +526,11 @@ async def 
decorate(*args, **kwargs): return decorate def _decorate_generator_function( - self, method: Callable = None, capture_response: bool = True, method_name: str = None + self, + method: Callable = None, + capture_response: Optional[bool] = None, + capture_error: Optional[bool] = None, + method_name: str = None, ): @functools.wraps(method) def decorate(*args, **kwargs): @@ -506,7 +543,9 @@ def decorate(*args, **kwargs): ) except Exception as err: logger.exception(f"Exception received from '{method_name}' method") - self._add_full_exception_as_metadata(method_name=method_name, error=err, subsegment=subsegment) + self._add_full_exception_as_metadata( + method_name=method_name, error=err, subsegment=subsegment, capture_error=capture_error + ) raise return result @@ -514,7 +553,11 @@ def decorate(*args, **kwargs): return decorate def _decorate_generator_function_with_context_manager( - self, method: Callable = None, capture_response: bool = True, method_name: str = None + self, + method: Callable = None, + capture_response: Optional[bool] = None, + capture_error: Optional[bool] = None, + method_name: str = None, ): @functools.wraps(method) @contextlib.contextmanager @@ -530,12 +573,20 @@ def decorate(*args, **kwargs): ) except Exception as err: logger.exception(f"Exception received from '{method_name}' method") - self._add_full_exception_as_metadata(method_name=method_name, error=err, subsegment=subsegment) + self._add_full_exception_as_metadata( + method_name=method_name, error=err, subsegment=subsegment, capture_error=capture_error + ) raise return decorate - def _decorate_sync_function(self, method: Callable = None, capture_response: bool = True, method_name: str = None): + def _decorate_sync_function( + self, + method: Callable = None, + capture_response: Optional[bool] = None, + capture_error: Optional[bool] = None, + method_name: str = None, + ): @functools.wraps(method) def decorate(*args, **kwargs): with self.provider.in_subsegment(name=f"## {method_name}") as 
subsegment: @@ -550,7 +601,9 @@ def decorate(*args, **kwargs): ) except Exception as err: logger.exception(f"Exception received from '{method_name}' method") - self._add_full_exception_as_metadata(method_name=method_name, error=err, subsegment=subsegment) + self._add_full_exception_as_metadata( + method_name=method_name, error=err, subsegment=subsegment, capture_error=capture_error + ) raise return response @@ -562,7 +615,7 @@ def _add_response_as_metadata( method_name: str = None, data: Any = None, subsegment: aws_xray_sdk.core.models.subsegment = None, - capture_response: bool = True, + capture_response: Optional[bool] = None, ): """Add response as metadata for given subsegment @@ -575,7 +628,7 @@ def _add_response_as_metadata( subsegment : aws_xray_sdk.core.models.subsegment, optional existing subsegment to add metadata on, by default None capture_response : bool, optional - Do not include response as metadata, by default True + Do not include response as metadata """ if data is None or not capture_response or subsegment is None: return @@ -583,7 +636,11 @@ def _add_response_as_metadata( subsegment.put_metadata(key=f"{method_name} response", value=data, namespace=self._config["service"]) def _add_full_exception_as_metadata( - self, method_name: str = None, error: Exception = None, subsegment: aws_xray_sdk.core.models.subsegment = None + self, + method_name: str = None, + error: Exception = None, + subsegment: aws_xray_sdk.core.models.subsegment = None, + capture_error: Optional[bool] = None, ): """Add full exception object as metadata for given subsegment @@ -595,7 +652,12 @@ def _add_full_exception_as_metadata( error to add as subsegment metadata, by default None subsegment : aws_xray_sdk.core.models.subsegment, optional existing subsegment to add metadata on, by default None + capture_error : bool, optional + Do not include error as metadata, by default True """ + if not capture_error: + return + subsegment.put_metadata(key=f"{method_name} error", value=error, 
namespace=self._config["service"]) @staticmethod diff --git a/docs/content/core/tracer.mdx b/docs/content/core/tracer.mdx index 62559ac9fe2..2e36f4b7d0b 100644 --- a/docs/content/core/tracer.mdx +++ b/docs/content/core/tracer.mdx @@ -65,12 +65,16 @@ def handler(event, context): ... ``` +### disabling response auto-capture + +> New in 1.9.0 + Returning sensitive information from your Lambda handler or functions, where Tracer is used?

- You can disable Tracer from capturing their responses as tracing metadata with capture_response=False parameter in both capture_lambda_handler and capture_method decorators.

+You can disable Tracer from capturing their responses as tracing metadata with capture_response=False parameter in both capture_lambda_handler and capture_method decorators. ```python:title=do_not_capture_response_as_metadata.py # Disables Tracer from capturing response and adding as metadata @@ -80,6 +84,25 @@ def handler(event, context): return "sensitive_information" ``` +### disabling exception auto-capture + +> New in 1.10.0 + + + Can exceptions contain sensitive information from your Lambda handler or functions, where Tracer is used? +

+

+ +You can disable Tracer from capturing their exceptions as tracing metadata with capture_error=False parameter in both capture_lambda_handler and capture_method decorators. + +```python:title=do_not_capture_exception_as_metadata.py +# Disables Tracer from capturing exception and adding as metadata +# Useful when dealing with sensitive data +@tracer.capture_lambda_handler(capture_error=False) # highlight-line +def handler(event, context): + raise ValueError("some sensitive info in the stack trace...") +``` + ### Annotations Annotations are key-values indexed by AWS X-Ray on a per trace basis. You can use them to filter traces as well as to create [Trace Groups](https://aws.amazon.com/about-aws/whats-new/2018/11/aws-xray-adds-the-ability-to-group-traces/). diff --git a/docs/content/index.mdx b/docs/content/index.mdx index e8ff25910dc..d313f284368 100644 --- a/docs/content/index.mdx +++ b/docs/content/index.mdx @@ -122,17 +122,23 @@ Utility | Description ## Environment variables + + Explicit parameters take precedence over environment variables.

+
+ **Environment variables** used across suite of utilities. -Environment variable | Description | Utility -------------------------------------------------- | --------------------------------------------------------------------------------- | --------------------------------------------------------------------------------- | ------------------------------------------------- -**POWERTOOLS_SERVICE_NAME** | Sets service name used for tracing namespace, metrics dimension and structured logging | All -**POWERTOOLS_METRICS_NAMESPACE** | Sets namespace used for metrics | [Metrics](./core/metrics) -**POWERTOOLS_TRACE_DISABLED** | Disables tracing | [Tracing](./core/tracer) -**POWERTOOLS_TRACE_MIDDLEWARES** | Creates sub-segment for each custom middleware | [Middleware factory](./utilities/middleware_factory) -**POWERTOOLS_LOGGER_LOG_EVENT** | Logs incoming event | [Logging](./core/logger) -**POWERTOOLS_LOGGER_SAMPLE_RATE** | Debug log sampling | [Logging](./core/logger) -**LOG_LEVEL** | Sets logging level | [Logging](./core/logger) +Environment variable | Description | Utility | Default +------------------------------------------------- | --------------------------------------------------------------------------------- | --------------------------------------------------------------------------------- | ------------------------------------------------- | ------------------------------------------------- +**POWERTOOLS_SERVICE_NAME** | Sets service name used for tracing namespace, metrics dimension and structured logging | All | `"service_undefined"` +**POWERTOOLS_METRICS_NAMESPACE** | Sets namespace used for metrics | [Metrics](./core/metrics) | `None` +**POWERTOOLS_TRACE_DISABLED** | Disables tracing | [Tracing](./core/tracer) | `false` +**POWERTOOLS_TRACER_CAPTURE_RESPONSE** | Captures Lambda or method return as metadata. | [Tracing](./core/tracer) | `true` +**POWERTOOLS_TRACER_CAPTURE_ERROR** | Captures Lambda or method exception as metadata. 
| [Tracing](./core/tracer) | `true` +**POWERTOOLS_TRACE_MIDDLEWARES** | Creates sub-segment for each custom middleware | [Middleware factory](./utilities/middleware_factory) | `false` +**POWERTOOLS_LOGGER_LOG_EVENT** | Logs incoming event | [Logging](./core/logger) | `false` +**POWERTOOLS_LOGGER_SAMPLE_RATE** | Debug log sampling | [Logging](./core/logger) | `0` +**LOG_LEVEL** | Sets logging level | [Logging](./core/logger) | `INFO` ## Debug mode diff --git a/tests/functional/test_shared_functions.py b/tests/functional/test_shared_functions.py new file mode 100644 index 00000000000..ac05babb753 --- /dev/null +++ b/tests/functional/test_shared_functions.py @@ -0,0 +1,9 @@ +from aws_lambda_powertools.shared.functions import resolve_env_var_choice + + +def test_resolve_env_var_choice_explicit_wins_over_env_var(): + assert resolve_env_var_choice(env="true", choice=False) is False + + +def test_resolve_env_var_choice_env_wins_over_absent_explicit(): + assert resolve_env_var_choice(env="true") == 1 diff --git a/tests/unit/test_tracing.py b/tests/unit/test_tracing.py index c8df34a76a6..71188e216ec 100644 --- a/tests/unit/test_tracing.py +++ b/tests/unit/test_tracing.py @@ -502,13 +502,15 @@ def generator_fn(): assert str(put_metadata_mock_args["value"]) == "test" -def test_tracer_lambda_handler_does_not_add_response_as_metadata(mocker, provider_stub, in_subsegment_mock): +def test_tracer_lambda_handler_override_response_as_metadata(mocker, provider_stub, in_subsegment_mock): # GIVEN tracer is initialized provider = provider_stub(in_subsegment=in_subsegment_mock.in_subsegment) + + mocker.patch("aws_lambda_powertools.tracing.tracer.TRACER_CAPTURE_RESPONSE_ENV", return_value=True) tracer = Tracer(provider=provider, auto_patch=False) # WHEN capture_lambda_handler decorator is used - # and the handler response is empty + # with capture_response set to False @tracer.capture_lambda_handler(capture_response=False) def handler(event, context): return "response" @@ -519,7 +521,7 @@ 
def handler(event, context): assert in_subsegment_mock.put_metadata.call_count == 0 -def test_tracer_method_does_not_add_response_as_metadata(mocker, provider_stub, in_subsegment_mock): +def test_tracer_method_override_response_as_metadata(provider_stub, in_subsegment_mock): # GIVEN tracer is initialized provider = provider_stub(in_subsegment=in_subsegment_mock.in_subsegment) tracer = Tracer(provider=provider, auto_patch=False) From e2ae79eeaade73f44f2a7d278c10b5836f4ebc3f Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Thu, 14 Jan 2021 15:42:30 +0100 Subject: [PATCH 5/8] chore: update stale bot --- .github/stale.yml | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/.github/stale.yml b/.github/stale.yml index 1bb42057efb..d0b1d54f4a3 100644 --- a/.github/stale.yml +++ b/.github/stale.yml @@ -1,12 +1,9 @@ only: issues -daysUntilStale: 30 +daysUntilStale: 14 daysUntilClose: 7 exemptLabels: - bug - - documentation - - enhancement - feature-request - - RFC staleLabel: pending-close-response-required markComment: > This issue has been automatically marked as stale because it has not had From 893139033be609f78653f441eb0fcb5ae75ec985 Mon Sep 17 00:00:00 2001 From: Ran Isenberg <60175085+risenberg-cyberark@users.noreply.github.com> Date: Fri, 15 Jan 2021 18:52:23 +0200 Subject: [PATCH 6/8] feat: Add AppConfig parameter provider (#236) * feat: Add AppConfig parameter provider * fix: fix identation and remove YAML transform reference Co-authored-by: Ran Isenberg Co-authored-by: Nicolas Moutschen --- .../utilities/parameters/__init__.py | 3 + .../utilities/parameters/appconfig.py | 158 ++++++++++++++++++ docs/content/utilities/parameters.mdx | 49 +++++- tests/functional/test_utilities_parameters.py | 83 +++++++++ 4 files changed, 285 insertions(+), 8 deletions(-) create mode 100644 aws_lambda_powertools/utilities/parameters/appconfig.py diff --git a/aws_lambda_powertools/utilities/parameters/__init__.py 
b/aws_lambda_powertools/utilities/parameters/__init__.py index 07f9cb2ca76..83a426757dc 100644 --- a/aws_lambda_powertools/utilities/parameters/__init__.py +++ b/aws_lambda_powertools/utilities/parameters/__init__.py @@ -4,6 +4,7 @@ Parameter retrieval and caching utility """ +from .appconfig import AppConfigProvider, get_app_config from .base import BaseProvider from .dynamodb import DynamoDBProvider from .exceptions import GetParameterError, TransformParameterError @@ -11,12 +12,14 @@ from .ssm import SSMProvider, get_parameter, get_parameters __all__ = [ + "AppConfigProvider", "BaseProvider", "GetParameterError", "DynamoDBProvider", "SecretsProvider", "SSMProvider", "TransformParameterError", + "get_app_config", "get_parameter", "get_parameters", "get_secret", diff --git a/aws_lambda_powertools/utilities/parameters/appconfig.py b/aws_lambda_powertools/utilities/parameters/appconfig.py new file mode 100644 index 00000000000..ffef7e37e19 --- /dev/null +++ b/aws_lambda_powertools/utilities/parameters/appconfig.py @@ -0,0 +1,158 @@ +""" +AWS App Config configuration retrieval and caching utility +""" + + +import os +from typing import Dict, Optional, Union +from uuid import uuid4 + +import boto3 +from botocore.config import Config + +from .base import DEFAULT_PROVIDERS, BaseProvider + +CLIENT_ID = str(uuid4()) + + +class AppConfigProvider(BaseProvider): + """ + AWS App Config Provider + + Parameters + ---------- + environment: str + Environment of the configuration to pass during client initialization + application: str, optional + Application of the configuration to pass during client initialization + config: botocore.config.Config, optional + Botocore configuration to pass during client initialization + + Example + ------- + **Retrieves the latest configuration value from App Config** + + >>> from aws_lambda_powertools.utilities.parameters import AppConfigProvider + >>> appconf_provider = parameters.AppConfigProvider(environment="my_env", application="my_app") + 
>>> + >>> value : bytes = appconf_provider.get("my_conf") + >>> + >>> print(value) + My configuration value + + **Retrieves a configuration value from App Config in another AWS region** + + >>> from botocore.config import Config + >>> from aws_lambda_powertools.utilities.parameters import AppConfigProvider + >>> + >>> config = Config(region_name="us-west-1") + >>> appconf_provider = parameters.AppConfigProvider(environment="my_env", application="my_app", config=config) + >>> + >>> value : bytes = appconf_provider.get("my_conf") + >>> + >>> print(value) + My configuration value + + """ + + client = None + + def __init__( + self, environment: str, application: Optional[str] = None, config: Optional[Config] = None, + ): + """ + Initialize the App Config client + """ + + config = config or Config() + self.client = boto3.client("appconfig", config=config) + self.application = application or os.getenv("POWERTOOLS_SERVICE_NAME") or "application_undefined" + self.environment = environment + self.current_version = "" + + super().__init__() + + def _get(self, name: str, **sdk_options) -> str: + """ + Retrieve a parameter value from AWS App config. 
+ + Parameters + ---------- + name: str + Name of the configuration + environment: str + Environment of the configuration + sdk_options: dict, optional + Dictionary of options that will be passed to the App Config get_configuration API call + """ + + sdk_options["Configuration"] = name + sdk_options["Application"] = self.application + sdk_options["Environment"] = self.environment + sdk_options["ClientId"] = CLIENT_ID + + response = self.client.get_configuration(**sdk_options) + return response["Content"].read() # read() of botocore.response.StreamingBody + + def _get_multiple(self, path: str, **sdk_options) -> Dict[str, str]: + """ + Retrieving multiple parameter values is not supported with AWS App Config Provider + """ + raise NotImplementedError() + + +def get_app_config( + name: str, environment: str, application: Optional[str] = None, transform: Optional[str] = None, **sdk_options +) -> Union[str, list, dict, bytes]: + """ + Retrieve a configuration value from AWS App Config. + + Parameters + ---------- + name: str + Name of the configuration + environment: str + Environment of the configuration + application: str + Application of the configuration + transform: str, optional + Transforms the content from a JSON object ('json') or base64 binary string ('binary') + sdk_options: dict, optional + Dictionary of options that will be passed to the App Config get_configuration API call + + Raises + ------ + GetParameterError + When the parameter provider fails to retrieve a parameter value for + a given name. + TransformParameterError + When the parameter provider fails to transform a parameter value. 
+ + Example + ------- + **Retrieves the latest version of configuration value from App Config** + + >>> from aws_lambda_powertools.utilities.parameters import get_app_config + >>> + >>> value = get_app_config("my_config", environment="my_env", application="my_app") + >>> + >>> print(value) + My configuration value + + **Retrieves a configuration value and decodes it using a JSON decoder** + + >>> from aws_lambda_powertools.utilities.parameters import get_app_config + >>> + >>> value = get_app_config("my_config", environment="my_env", application="my_app", transform='json') + >>> + >>> print(value) + My configuration's JSON value + """ + + # Only create the provider if this function is called at least once + if "appconfig" not in DEFAULT_PROVIDERS: + DEFAULT_PROVIDERS["appconfig"] = AppConfigProvider(environment=environment, application=application) + + sdk_options["ClientId"] = CLIENT_ID + + return DEFAULT_PROVIDERS["appconfig"].get(name, transform=transform, **sdk_options) diff --git a/docs/content/utilities/parameters.mdx b/docs/content/utilities/parameters.mdx index b40bfe2c885..2bb6fcee1d7 100644 --- a/docs/content/utilities/parameters.mdx +++ b/docs/content/utilities/parameters.mdx @@ -24,6 +24,7 @@ SSM Parameter Store | `get_parameters`, `SSMProvider.get_multiple` | `ssm:GetPar Secrets Manager | `get_secret`, `SecretsManager.get` | `secretsmanager:GetSecretValue` DynamoDB | `DynamoDBProvider.get` | `dynamodb:GetItem` DynamoDB | `DynamoDBProvider.get_multiple` | `dynamodb:Query` +App Config | `AppConfigProvider.get`, `get_app_config` | `appconfig:GetConfiguration` ## SSM Parameter Store @@ -204,6 +205,37 @@ def handler(event, context): value = dynamodb_provider.get("my-parameter") ``` + +## App Config + +For configurations stored in App Config, use `get_app_config`. +The following will retrieve the latest version and store it in the cache. 
+ +```python:title=appconfig.py +from aws_lambda_powertools.utilities import parameters + +def handler(event, context): + # Retrieve a single configuration, latest version + value: bytes = parameters.get_app_config(name="my_configuration", environment="my_env", application="my_app") +``` + +### AppConfigProvider class + +Alternatively, you can use the `AppConfigProvider` class, which gives more flexibility, such as the ability to configure the underlying SDK client. + +This can be used to retrieve values from other regions, change the retry behavior, etc. + +```python:title=appconfig.py +from aws_lambda_powertools.utilities import parameters +from botocore.config import Config + +config = Config(region_name="us-west-1") +appconf_provider = parameters.AppConfigProvider(environment="my_env", application="my_app", config=config) + +def handler(event, context): + # Retrieve a single configuration + value : bytes = appconf_provider.get("my_conf") +``` + ## Create your own provider You can create your own custom parameter store provider by inheriting the `BaseProvider` class, and implementing both `_get()` and `_get_multiple()` methods to retrieve a single, or multiple parameters from your custom store. 
@@ -334,13 +366,14 @@ def handler(event, context): Here is the mapping between this utility's functions and methods and the underlying SDK: -| Provider | Function/Method | Client name | Function name | -|---------------------|---------------------------------|-------------|---------------| -| SSM Parameter Store | `get_parameter` | `ssm` | [get_parameter](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ssm.html#SSM.Client.get_parameter) | -| SSM Parameter Store | `get_parameters` | `ssm` | [get_parameters_by_path](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ssm.html#SSM.Client.get_parameters_by_path) | -| SSM Parameter Store | `SSMProvider.get` | `ssm` | [get_parameter](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ssm.html#SSM.Client.get_parameter) | -| SSM Parameter Store | `SSMProvider.get_multiple` | `ssm` | [get_parameters_by_path](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ssm.html#SSM.Client.get_parameters_by_path) | +| Provider | Function/Method | Client name | Function name | +|---------------------|---------------------------------|------------------|----------------| +| SSM Parameter Store | `get_parameter` | `ssm` | [get_parameter](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ssm.html#SSM.Client.get_parameter) | +| SSM Parameter Store | `get_parameters` | `ssm` | [get_parameters_by_path](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ssm.html#SSM.Client.get_parameters_by_path) | +| SSM Parameter Store | `SSMProvider.get` | `ssm` | [get_parameter](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ssm.html#SSM.Client.get_parameter) | +| SSM Parameter Store | `SSMProvider.get_multiple` | `ssm` | [get_parameters_by_path](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ssm.html#SSM.Client.get_parameters_by_path) | | Secrets Manager | 
`get_secret` | `secretsmanager` | [get_secret_value](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/secretsmanager.html#SecretsManager.Client.get_secret_value) | | Secrets Manager | `SecretsManager.get` | `secretsmanager` | [get_secret_value](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/secretsmanager.html#SecretsManager.Client.get_secret_value) | -| DynamoDB | `DynamoDBProvider.get` | `dynamodb` ([Table resource](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#table)) | [get_item](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#DynamoDB.Table.get_item) -| DynamoDB | `DynamoDBProvider.get_multiple` | `dynamodb` ([Table resource](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#table)) | [query](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#DynamoDB.Table.query) +| DynamoDB | `DynamoDBProvider.get` | `dynamodb` ([Table resource](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#table)) | [get_item](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#DynamoDB.Table.get_item) +| DynamoDB | `DynamoDBProvider.get_multiple` | `dynamodb` ([Table resource](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#table)) | [query](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#DynamoDB.Table.query) +| App Config | `get_app_config` | `appconfig` | [get_configuration](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/appconfig.html#AppConfig.Client.get_configuration) | diff --git a/tests/functional/test_utilities_parameters.py b/tests/functional/test_utilities_parameters.py index 55f643924ad..045b7fbbe18 100644 --- a/tests/functional/test_utilities_parameters.py +++ 
b/tests/functional/test_utilities_parameters.py @@ -3,12 +3,14 @@ import random import string from datetime import datetime, timedelta +from io import BytesIO from typing import Dict import pytest from boto3.dynamodb.conditions import Key from botocore import stub from botocore.config import Config +from botocore.response import StreamingBody from aws_lambda_powertools.utilities import parameters from aws_lambda_powertools.utilities.parameters.base import BaseProvider, ExpirableValue @@ -1451,6 +1453,87 @@ def _get_multiple(self, path: str, **kwargs) -> Dict[str, str]: assert value == mock_value +def test_appconf_provider_get_configuration_json_content_type(mock_name, config): + """ + Test get_configuration.get with default values + """ + + # Create a new provider + environment = "dev" + application = "myapp" + provider = parameters.AppConfigProvider(environment=environment, application=application, config=config) + + mock_body_json = {"myenvvar1": "Black Panther", "myenvvar2": 3} + encoded_message = json.dumps(mock_body_json).encode("utf-8") + mock_value = StreamingBody(BytesIO(encoded_message), len(encoded_message)) + + # Stub the boto3 client + stubber = stub.Stubber(provider.client) + response = {"Content": mock_value, "ConfigurationVersion": "1", "ContentType": "application/json"} + stubber.add_response("get_configuration", response) + stubber.activate() + + try: + value = provider.get(mock_name, transform="json", ClientConfigurationVersion="2") + + assert value == mock_body_json + stubber.assert_no_pending_responses() + finally: + stubber.deactivate() + + +def test_appconf_provider_get_configuration_no_transform(mock_name, config): + """ + Test appconfigprovider.get with default values + """ + + # Create a new provider + environment = "dev" + application = "myapp" + provider = parameters.AppConfigProvider(environment=environment, application=application, config=config) + + mock_body_json = {"myenvvar1": "Black Panther", "myenvvar2": 3} + encoded_message = 
json.dumps(mock_body_json).encode("utf-8") + mock_value = StreamingBody(BytesIO(encoded_message), len(encoded_message)) + + # Stub the boto3 client + stubber = stub.Stubber(provider.client) + response = {"Content": mock_value, "ConfigurationVersion": "1", "ContentType": "application/json"} + stubber.add_response("get_configuration", response) + stubber.activate() + + try: + value = provider.get(mock_name) + str_value = value.decode("utf-8") + assert str_value == json.dumps(mock_body_json) + stubber.assert_no_pending_responses() + finally: + stubber.deactivate() + + +def test_appconf_get_app_config_no_transform(monkeypatch, mock_name): + """ + Test get_app_config() + """ + mock_body_json = {"myenvvar1": "Black Panther", "myenvvar2": 3} + + class TestProvider(BaseProvider): + def _get(self, name: str, **kwargs) -> str: + assert name == mock_name + return json.dumps(mock_body_json).encode("utf-8") + + def _get_multiple(self, path: str, **kwargs) -> Dict[str, str]: + raise NotImplementedError() + + monkeypatch.setitem(parameters.base.DEFAULT_PROVIDERS, "appconfig", TestProvider()) + + environment = "dev" + application = "myapp" + value = parameters.get_app_config(mock_name, environment=environment, application=application) + str_value = value.decode("utf-8") + assert str_value == json.dumps(mock_body_json) + + def test_transform_value_json(mock_value): """ Test transform_value() with a json transform From ea9042167604fd24dee4f6546d947a591f3c84ae Mon Sep 17 00:00:00 2001 From: Timo Sutterer Date: Sat, 16 Jan 2021 14:31:59 +0100 Subject: [PATCH 7/8] docs: fix import (#267) --- docs/content/utilities/data_classes.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/content/utilities/data_classes.mdx b/docs/content/utilities/data_classes.mdx index fd449b4edf5..9a602080d8c 100644 --- a/docs/content/utilities/data_classes.mdx +++ b/docs/content/utilities/data_classes.mdx @@ -119,7 +119,7 @@ Create Auth Challenge | 
`data_classes.cognito_user_pool_event.CreateAuthChalleng Verify Auth Challenge | `data_classes.cognito_user_pool_event.VerifyAuthChallengeResponseTriggerEvent` ```python:title=lambda_app.py -from aws_lambda_powertools.utilities.cognito_user_pool_event import PostConfirmationTriggerEvent +from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import PostConfirmationTriggerEvent def lambda_handler(event, context): event: PostConfirmationTriggerEvent = PostConfirmationTriggerEvent(event) From 454d66938610a2e11ae99c57d945ac877e85c27c Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Sun, 17 Jan 2021 17:26:14 +0100 Subject: [PATCH 8/8] chore: move env names to constant file (#264) * chore: shift vars to constant to ease changing * improv: update tests to include non-truthy choice var --- aws_lambda_powertools/logging/formatter.py | 4 +- aws_lambda_powertools/logging/logger.py | 18 +++++--- aws_lambda_powertools/metrics/base.py | 6 ++- .../middleware_factory/factory.py | 9 ++-- aws_lambda_powertools/shared/constants.py | 17 ++++++-- aws_lambda_powertools/shared/functions.py | 39 +++++++++++++++++- aws_lambda_powertools/tracing/tracer.py | 35 +++++++++------- tests/functional/test_logger.py | 6 +-- tests/functional/test_shared_functions.py | 8 ++-- tests/unit/test_tracing.py | 41 ++++++++++++++++--- 10 files changed, 141 insertions(+), 42 deletions(-) diff --git a/aws_lambda_powertools/logging/formatter.py b/aws_lambda_powertools/logging/formatter.py index 063b97ab21c..cfda64bc8e6 100644 --- a/aws_lambda_powertools/logging/formatter.py +++ b/aws_lambda_powertools/logging/formatter.py @@ -3,6 +3,8 @@ import os from typing import Dict, Iterable, Optional, Union +from ..shared import constants + STD_LOGGING_KEYS = ( "name", "msg", @@ -73,7 +75,7 @@ def _build_root_keys(**kwargs): @staticmethod def _get_latest_trace_id(): - xray_trace_id = os.getenv("_X_AMZN_TRACE_ID") + xray_trace_id = os.getenv(constants.XRAY_TRACE_ID_ENV) return 
xray_trace_id.split(";")[0].replace("Root=", "") if xray_trace_id else None def update_formatter(self, **kwargs): diff --git a/aws_lambda_powertools/logging/logger.py b/aws_lambda_powertools/logging/logger.py index e03f542e6c6..bc44b14b1e5 100644 --- a/aws_lambda_powertools/logging/logger.py +++ b/aws_lambda_powertools/logging/logger.py @@ -4,9 +4,10 @@ import os import random import sys -from distutils.util import strtobool from typing import Any, Callable, Dict, Union +from ..shared import constants +from ..shared.functions import resolve_env_var_choice, resolve_truthy_env_var_choice from .exceptions import InvalidLoggerSamplingRateError from .filters import SuppressFilter from .formatter import JsonFormatter @@ -122,8 +123,12 @@ def __init__( stream: sys.stdout = None, **kwargs, ): - self.service = service or os.getenv("POWERTOOLS_SERVICE_NAME") or "service_undefined" - self.sampling_rate = sampling_rate or os.getenv("POWERTOOLS_LOGGER_SAMPLE_RATE") or 0.0 + self.service = resolve_env_var_choice( + choice=service, env=os.getenv(constants.SERVICE_NAME_ENV, "service_undefined") + ) + self.sampling_rate = resolve_env_var_choice( + choice=sampling_rate, env=os.getenv(constants.LOGGER_LOG_SAMPLING_RATE, 0.0) + ) self.log_level = self._get_log_level(level) self.child = child self._handler = logging.StreamHandler(stream) if stream is not None else logging.StreamHandler(sys.stdout) @@ -193,7 +198,7 @@ def _configure_sampling(self): f"Please review POWERTOOLS_LOGGER_SAMPLE_RATE environment variable." 
) - def inject_lambda_context(self, lambda_handler: Callable[[Dict, Any], Any] = None, log_event: bool = False): + def inject_lambda_context(self, lambda_handler: Callable[[Dict, Any], Any] = None, log_event: bool = None): """Decorator to capture Lambda contextual info and inject into logger Parameters @@ -242,8 +247,9 @@ def handler(event, context): logger.debug("Decorator called with parameters") return functools.partial(self.inject_lambda_context, log_event=log_event) - log_event_env_option = str(os.getenv("POWERTOOLS_LOGGER_LOG_EVENT", "false")) - log_event = strtobool(log_event_env_option) or log_event + log_event = resolve_truthy_env_var_choice( + choice=log_event, env=os.getenv(constants.LOGGER_LOG_EVENT_ENV, "false") + ) @functools.wraps(lambda_handler) def decorate(event, context): diff --git a/aws_lambda_powertools/metrics/base.py b/aws_lambda_powertools/metrics/base.py index b54b72bf58a..ecc44edf0fa 100644 --- a/aws_lambda_powertools/metrics/base.py +++ b/aws_lambda_powertools/metrics/base.py @@ -10,6 +10,8 @@ import fastjsonschema +from ..shared import constants +from ..shared.functions import resolve_env_var_choice from .exceptions import MetricUnitError, MetricValueError, SchemaValidationError logger = logging.getLogger(__name__) @@ -88,8 +90,8 @@ def __init__( ): self.metric_set = metric_set if metric_set is not None else {} self.dimension_set = dimension_set if dimension_set is not None else {} - self.namespace = namespace or os.getenv("POWERTOOLS_METRICS_NAMESPACE") - self.service = service or os.environ.get("POWERTOOLS_SERVICE_NAME") + self.namespace = resolve_env_var_choice(choice=namespace, env=os.getenv(constants.METRICS_NAMESPACE_ENV)) + self.service = resolve_env_var_choice(choice=service, env=os.getenv(constants.SERVICE_NAME_ENV)) self._metric_units = [unit.value for unit in MetricUnit] self._metric_unit_options = list(MetricUnit.__members__) self.metadata_set = self.metadata_set if metadata_set is not None else {} diff --git 
a/aws_lambda_powertools/middleware_factory/factory.py b/aws_lambda_powertools/middleware_factory/factory.py index d71c2e19d67..77277052272 100644 --- a/aws_lambda_powertools/middleware_factory/factory.py +++ b/aws_lambda_powertools/middleware_factory/factory.py @@ -2,16 +2,17 @@ import inspect import logging import os -from distutils.util import strtobool from typing import Callable +from ..shared import constants +from ..shared.functions import resolve_truthy_env_var_choice from ..tracing import Tracer from .exceptions import MiddlewareInvalidArgumentError logger = logging.getLogger(__name__) -def lambda_handler_decorator(decorator: Callable = None, trace_execution=False): +def lambda_handler_decorator(decorator: Callable = None, trace_execution: bool = None): """Decorator factory for decorating Lambda handlers. You can use lambda_handler_decorator to create your own middlewares, @@ -104,7 +105,9 @@ def lambda_handler(event, context): if decorator is None: return functools.partial(lambda_handler_decorator, trace_execution=trace_execution) - trace_execution = trace_execution or strtobool(str(os.getenv("POWERTOOLS_TRACE_MIDDLEWARES", False))) + trace_execution = resolve_truthy_env_var_choice( + choice=trace_execution, env=os.getenv(constants.MIDDLEWARE_FACTORY_TRACE_ENV, "false") + ) @functools.wraps(decorator) def final_decorator(func: Callable = None, **kwargs): diff --git a/aws_lambda_powertools/shared/constants.py b/aws_lambda_powertools/shared/constants.py index a3863c33286..27ab3c34197 100644 --- a/aws_lambda_powertools/shared/constants.py +++ b/aws_lambda_powertools/shared/constants.py @@ -1,4 +1,15 @@ -import os +TRACER_CAPTURE_RESPONSE_ENV: str = "POWERTOOLS_TRACER_CAPTURE_RESPONSE" +TRACER_CAPTURE_ERROR_ENV: str = "POWERTOOLS_TRACER_CAPTURE_ERROR" +TRACER_DISABLED_ENV: str = "POWERTOOLS_TRACE_DISABLED" -TRACER_CAPTURE_RESPONSE_ENV: str = os.getenv("POWERTOOLS_TRACER_CAPTURE_RESPONSE", "true") -TRACER_CAPTURE_ERROR_ENV: str = 
os.getenv("POWERTOOLS_TRACER_CAPTURE_ERROR", "true") +LOGGER_LOG_SAMPLING_RATE: str = "POWERTOOLS_LOGGER_SAMPLE_RATE" +LOGGER_LOG_EVENT_ENV: str = "POWERTOOLS_LOGGER_LOG_EVENT" + +MIDDLEWARE_FACTORY_TRACE_ENV: str = "POWERTOOLS_TRACE_MIDDLEWARES" + +METRICS_NAMESPACE_ENV: str = "POWERTOOLS_METRICS_NAMESPACE" + +SAM_LOCAL_ENV: str = "AWS_SAM_LOCAL" +CHALICE_LOCAL_ENV: str = "AWS_CHALICE_CLI_MODE" +SERVICE_NAME_ENV: str = "POWERTOOLS_SERVICE_NAME" +XRAY_TRACE_ID_ENV: str = "_X_AMZN_TRACE_ID" diff --git a/aws_lambda_powertools/shared/functions.py b/aws_lambda_powertools/shared/functions.py index 2a3af7db5f3..c8744143832 100644 --- a/aws_lambda_powertools/shared/functions.py +++ b/aws_lambda_powertools/shared/functions.py @@ -1,5 +1,42 @@ from distutils.util import strtobool +from typing import Any, Union -def resolve_env_var_choice(env: str, choice: bool = None) -> bool: +def resolve_truthy_env_var_choice(env: Any, choice: bool = None) -> bool: + """ Pick explicit choice over truthy env value, if available, otherwise return truthy env value + + NOTE: Environment variable should be resolved by the caller. + + Parameters + ---------- + env : Any + environment variable actual value + choice : bool + explicit choice + + Returns + ------- + choice : str + resolved choice as either bool or environment value + """ return choice if choice is not None else strtobool(env) + + +def resolve_env_var_choice(env: Any, choice: bool = None) -> Union[bool, Any]: + """ Pick explicit choice over env, if available, otherwise return env value received + + NOTE: Environment variable should be resolved by the caller. 
+ + Parameters + ---------- + env : Any + environment variable actual value + choice : bool + explicit choice + + Returns + ------- + choice : str + resolved choice as either bool or environment value + """ + return choice if choice is not None else env diff --git a/aws_lambda_powertools/tracing/tracer.py b/aws_lambda_powertools/tracing/tracer.py index 2e083e5ebab..26e12a0fb63 100644 --- a/aws_lambda_powertools/tracing/tracer.py +++ b/aws_lambda_powertools/tracing/tracer.py @@ -4,14 +4,13 @@ import inspect import logging import os -from distutils.util import strtobool from typing import Any, Callable, Dict, List, Optional, Tuple import aws_xray_sdk import aws_xray_sdk.core -from aws_lambda_powertools.shared.constants import TRACER_CAPTURE_ERROR_ENV, TRACER_CAPTURE_RESPONSE_ENV -from aws_lambda_powertools.shared.functions import resolve_env_var_choice +from ..shared import constants +from ..shared.functions import resolve_truthy_env_var_choice is_cold_start = True logger = logging.getLogger(__name__) @@ -283,9 +282,12 @@ def handler(event, context): ) lambda_handler_name = lambda_handler.__name__ - - capture_response = resolve_env_var_choice(env=TRACER_CAPTURE_RESPONSE_ENV, choice=capture_response) - capture_error = resolve_env_var_choice(env=TRACER_CAPTURE_ERROR_ENV, choice=capture_error) + capture_response = resolve_truthy_env_var_choice( + env=os.getenv(constants.TRACER_CAPTURE_RESPONSE_ENV, "true"), choice=capture_response + ) + capture_error = resolve_truthy_env_var_choice( + env=os.getenv(constants.TRACER_CAPTURE_ERROR_ENV, "true"), choice=capture_error + ) @functools.wraps(lambda_handler) def decorate(event, context): @@ -478,8 +480,12 @@ async def async_tasks(): method_name = f"{method.__name__}" - capture_response = resolve_env_var_choice(env=TRACER_CAPTURE_RESPONSE_ENV, choice=capture_response) - capture_error = resolve_env_var_choice(env=TRACER_CAPTURE_ERROR_ENV, choice=capture_error) + capture_response = resolve_truthy_env_var_choice( + 
env=os.getenv(constants.TRACER_CAPTURE_RESPONSE_ENV, "true"), choice=capture_response + ) + capture_error = resolve_truthy_env_var_choice( + env=os.getenv(constants.TRACER_CAPTURE_ERROR_ENV, "true"), choice=capture_error + ) if inspect.iscoroutinefunction(method): return self._decorate_async_function( @@ -681,14 +687,13 @@ def _is_tracer_disabled() -> bool: bool """ logger.debug("Verifying whether Tracing has been disabled") - is_lambda_sam_cli = os.getenv("AWS_SAM_LOCAL") - is_chalice_cli = os.getenv("AWS_CHALICE_CLI_MODE") - env_option = str(os.getenv("POWERTOOLS_TRACE_DISABLED", "false")) - disabled_env = strtobool(env_option) + is_lambda_sam_cli = os.getenv(constants.SAM_LOCAL_ENV) + is_chalice_cli = os.getenv(constants.CHALICE_LOCAL_ENV) + is_disabled = resolve_truthy_env_var_choice(env=os.getenv(constants.TRACER_DISABLED_ENV, "false")) - if disabled_env: + if is_disabled: logger.debug("Tracing has been disabled via env var POWERTOOLS_TRACE_DISABLED") - return disabled_env + return is_disabled if is_lambda_sam_cli or is_chalice_cli: logger.debug("Running under SAM CLI env or not in Lambda env; disabling Tracing") @@ -706,7 +711,7 @@ def __build_config( ): """ Populates Tracer config for new and existing initializations """ is_disabled = disabled if disabled is not None else self._is_tracer_disabled() - is_service = service if service is not None else os.getenv("POWERTOOLS_SERVICE_NAME") + is_service = service if service is not None else os.getenv(constants.SERVICE_NAME_ENV) self._config["provider"] = provider if provider is not None else self._config["provider"] self._config["auto_patch"] = auto_patch if auto_patch is not None else self._config["auto_patch"] diff --git a/tests/functional/test_logger.py b/tests/functional/test_logger.py index 8e8025a6cb8..6c7862896a3 100644 --- a/tests/functional/test_logger.py +++ b/tests/functional/test_logger.py @@ -373,9 +373,9 @@ def test_logger_do_not_log_twice_when_root_logger_is_setup(stdout, service_name) # WHEN we 
create a new Logger and child Logger logger = Logger(service=service_name, stream=stdout) - child_logger = Logger(child=True, stream=stdout) - logger.info("hello") - child_logger.info("hello again") + child_logger = Logger(service=service_name, child=True, stream=stdout) + logger.info("PARENT") + child_logger.info("CHILD") # THEN it should only contain only two log entries # since child's log records propagated to root logger should be rejected diff --git a/tests/functional/test_shared_functions.py b/tests/functional/test_shared_functions.py index ac05babb753..cc4fd77fbe5 100644 --- a/tests/functional/test_shared_functions.py +++ b/tests/functional/test_shared_functions.py @@ -1,9 +1,11 @@ -from aws_lambda_powertools.shared.functions import resolve_env_var_choice +from aws_lambda_powertools.shared.functions import resolve_env_var_choice, resolve_truthy_env_var_choice def test_resolve_env_var_choice_explicit_wins_over_env_var(): - assert resolve_env_var_choice(env="true", choice=False) is False + assert resolve_truthy_env_var_choice(env="true", choice=False) is False + assert resolve_env_var_choice(env="something", choice=False) is False def test_resolve_env_var_choice_env_wins_over_absent_explicit(): - assert resolve_env_var_choice(env="true") == 1 + assert resolve_truthy_env_var_choice(env="true") == 1 + assert resolve_env_var_choice(env="something") == "something" diff --git a/tests/unit/test_tracing.py b/tests/unit/test_tracing.py index 71188e216ec..fdfdf5c6d6e 100644 --- a/tests/unit/test_tracing.py +++ b/tests/unit/test_tracing.py @@ -506,11 +506,9 @@ def test_tracer_lambda_handler_override_response_as_metadata(mocker, provider_st # GIVEN tracer is initialized provider = provider_stub(in_subsegment=in_subsegment_mock.in_subsegment) - mocker.patch("aws_lambda_powertools.tracing.tracer.TRACER_CAPTURE_RESPONSE_ENV", return_value=True) tracer = Tracer(provider=provider, auto_patch=False) - # WHEN capture_lambda_handler decorator is used - # with capture_response 
set to False + # WHEN capture_lambda_handler decorator is used with capture_response set to False @tracer.capture_lambda_handler(capture_response=False) def handler(event, context): return "response" @@ -526,8 +524,7 @@ def test_tracer_method_override_response_as_metadata(provider_stub, in_subsegmen provider = provider_stub(in_subsegment=in_subsegment_mock.in_subsegment) tracer = Tracer(provider=provider, auto_patch=False) - # WHEN capture_method decorator is used - # and the method response is empty + # WHEN capture_method decorator is used with capture_response set to False @tracer.capture_method(capture_response=False) def greeting(name, message): return "response" @@ -536,3 +533,37 @@ def greeting(name, message): # THEN we should not add any metadata assert in_subsegment_mock.put_metadata.call_count == 0 + + +def test_tracer_lambda_handler_override_error_as_metadata(mocker, provider_stub, in_subsegment_mock): + # GIVEN tracer is initialized + provider = provider_stub(in_subsegment=in_subsegment_mock.in_subsegment) + tracer = Tracer(provider=provider, auto_patch=False) + + # WHEN capture_lambda_handler decorator is used with capture_error set to False + @tracer.capture_lambda_handler(capture_error=False) + def handler(event, context): + raise ValueError("error") + + with pytest.raises(ValueError): + handler({}, mocker.MagicMock()) + + # THEN we should not add any metadata + assert in_subsegment_mock.put_metadata.call_count == 0 + + +def test_tracer_method_override_error_as_metadata(provider_stub, in_subsegment_mock): + # GIVEN tracer is initialized + provider = provider_stub(in_subsegment=in_subsegment_mock.in_subsegment) + tracer = Tracer(provider=provider, auto_patch=False) + + # WHEN capture_method decorator is used with capture_error set to False + @tracer.capture_method(capture_error=False) + def greeting(name, message): + raise ValueError("error") + + with pytest.raises(ValueError): + greeting(name="Foo", message="Bar") + + # THEN we should not add any 
metadata + assert in_subsegment_mock.put_metadata.call_count == 0