diff --git a/Makefile b/Makefile index ad21d25a3..e490abde6 100644 --- a/Makefile +++ b/Makefile @@ -14,6 +14,9 @@ spec: unzip -d spec CloudFormationResourceSpecification.zip rm CloudFormationResourceSpecification.zip +spec2: + curl -O https://d1uauaxba7bl26.cloudfront.net/latest/CloudFormationResourceSpecification.json + 2to3: 2to3 -n -w examples > 2to3-examples.patch 2to3 -n -w troposphere > 2to3-troposphere.patch diff --git a/batch_validator.py b/batch_validator.py new file mode 100644 index 000000000..71cd818e9 --- /dev/null +++ b/batch_validator.py @@ -0,0 +1,42 @@ +from .validators import exactly_one + + +class LaunchTemplateSpecificationMixin(object): + def validate(self): + template_ids = [ + 'LaunchTemplateId', + 'LaunchTemplateName' + ] + exactly_one(self.__class__.__name__, self.properties, template_ids) + + +def validate_environment_state(environment_state): + """ Validate response type + :param environment_state: State of the environment + :return: The provided value if valid + """ + valid_states = [ + "ENABLED", + "DISABLED" + ] + if environment_state not in valid_states: + raise ValueError( + "{} is not a valid environment state".format(environment_state) + ) + return environment_state + + +def validate_queue_state(queue_state): + """ Validate response type + :param queue_state: State of the queue + :return: The provided value if valid + """ + valid_states = [ + "ENABLED", + "DISABLED" + ] + if queue_state not in valid_states: + raise ValueError( + "{} is not a valid queue state".format(queue_state) + ) + return queue_state diff --git a/examples/S3_Bucket_With_Versioning_And_Lifecycle_Rules.py b/examples/S3_Bucket_With_Versioning_And_Lifecycle_Rules.py index 0aceded8b..0ceb15293 100644 --- a/examples/S3_Bucket_With_Versioning_And_Lifecycle_Rules.py +++ b/examples/S3_Bucket_With_Versioning_And_Lifecycle_Rules.py @@ -3,8 +3,8 @@ from troposphere import Output, Ref, Template from troposphere.s3 import Bucket, PublicRead, VersioningConfiguration, \ - 
LifecycleConfiguration, LifecycleRule, NoncurrentVersionTransition, \ - LifecycleRuleTransition + LifecycleConfiguration, Rule, NoncurrentVersionTransition, \ + Transition t = Template() @@ -30,7 +30,7 @@ LifecycleConfiguration=LifecycleConfiguration(Rules=[ # Add a rule to - LifecycleRule( + Rule( # Rule attributes Id="S3BucketRule001", Prefix="/only-this-sub-dir", @@ -38,7 +38,7 @@ # Applies to current objects ExpirationInDays=3650, Transitions=[ - LifecycleRuleTransition( + Transition( StorageClass="STANDARD_IA", TransitionInDays=60, ), diff --git a/scripts/gen.py b/scripts/gen.py index e6354208f..a8a850954 100644 --- a/scripts/gen.py +++ b/scripts/gen.py @@ -1,5 +1,6 @@ import argparse import json +import yaml import sys @@ -7,6 +8,25 @@ # Python code generator to create new troposphere classes from the # AWS resource specification. # +# This gnerator works by reading in an AWS resource specification json file. +# The resources and properties are split apart to align with a given output +# file. In other words, a type such as AWS::Batch::JobDefinition will be +# put into the batch.py file. +# +# Since there are usually discrepencies in the docs or spec files plus the +# need for validation routines to be included, there is now a YAML file to +# provide these overrides. The validators can override both at a class level +# to validate multiple properties or object consistency using a class Mixin, +# and a property level to validate the contents of that property using a +# simple function. The property required field can also be overriden. +# +# The validators for a given file are now put into a separate file, +# ending in _validators.py (such as batch_validators.py). +# +# Care is given to the output file to ensure pycodestyle and pyflakes tests +# will still pass. This incudes import declarations, class output ordering, +# and spacing considerations. 
+# # Todo: # - Currently only handles the single files (not the all-in-one) # (Note: but will deal with things like spec/GuardDuty*) @@ -16,14 +36,280 @@ # - Need to figure out the correct Timestamp type copyright_header = """\ -# Copyright (c) 2012-2018, Mark Peek +# Copyright (c) 2012-2019, Mark Peek # All rights reserved. # # See LICENSE file for full license. +# +# *** Do not modify - this file is autogenerated *** +# Resource specification version: %s -from . import AWSObject, AWSProperty -from .validators import boolean, integer """ +spec_version = "" + + +class Override(object): + """Handle overrides to the base resource specification. + + While the resource specification is the main source of truth for + CloudFormation resources and properties, there are sometimes bugs + or issues which require manual overrides. In addition, this handles + specifying more specific property and object validation functions. + """ + + def __init__(self, filename): + self.base = 'troposphere/' + self.filename = filename + try: + self.override = yaml.load(open(self.base + filename + ".yaml")) + except (OSError, IOError): + self.override = {} + + def get_header(self): + return self.override.get('header', "") + + def get_required(self, class_name, prop): + if self.override: + try: + v = self.override['classes'][class_name][prop]['required'] + return v + except KeyError: + return None + + def get_validator(self, class_name, prop): + if self.override: + try: + v = self.override['classes'][class_name][prop]['validator'] + return v.lstrip('common/') + except KeyError: + return None + + def get_class_validator(self, class_name): + if self.override: + try: + v = self.override['classes'][class_name]['validator'] + return v.lstrip('common/') + except KeyError: + return None + + def get_validator_list(self): + """Return a list of validators specified in the override file""" + ignore = [ + 'dict', + ] + vlist = [] + if not self.override: + return vlist + + for k, v in 
self.override['classes'].items(): + if 'validator' in v: + validator = v['validator'] + if validator not in ignore and validator not in vlist: + vlist.append(validator) + + for k, v in self.override['classes'].items(): + for kp, vp in v.items(): + if 'validator' in vp: + validator = vp['validator'] + if validator not in ignore and validator not in vlist: + vlist.append(validator) + return sorted(vlist) + + +class Node(object): + """Node object for building a per-file/service dependecy tree. + + Simple node object for creating and traversing the resource and + property dependencies to emit code resources in a well-defined order. + """ + + def __init__(self, name, props, resource_name): + self.name = name + self.props = props + self.resource_name = resource_name + self.children = [] + + def add_child(self, node): + self.children.append(node) + + +class File(object): + """Decribes a file object which contains resources for a given AWS service. + + The main output of this generator is a file containing all the property + and resource classes for a given AWS service. This handles various needs + such as imported objects, predictive ordering objects, and handling the + type and validation overrides. The objects are mapped into the file + based on the resource type. 
+ """ + + def __init__(self, filename): + self.filename = filename + self.imports = {} + self.properties = {} + self.resources = {} + self.resource_names = {} + self.override = Override(filename) + + def add_property(self, class_name, property_spec): + self.properties[class_name] = property_spec + + def add_resource(self, class_name, resource_spec, resource_name): + self.resources[class_name] = resource_spec + self.resource_names[class_name] = resource_name + + def _output_tags(self): + """Look for a Tags object to output a Tags import""" + for class_name, properties in sorted(self.resources.items()): + for key, value in sorted(properties.iteritems()): + validator = self.override.get_validator(class_name, key) + if key == 'Tags' and validator is None: + print "from troposphere import Tags" + return + for class_name, properties in sorted(self.properties.items()): + for key, value in sorted(properties.iteritems()): + validator = self.override.get_validator(class_name, key) + if key == 'Tags' and validator is None: + print "from troposphere import Tags" + return + + def _check_type(self, check_type, properties): + """Decode a properties type looking for a specific type.""" + if 'PrimitiveType' in properties: + return properties['PrimitiveType'] == check_type + if properties['Type'] == 'List': + if 'ItemType' in properties: + return properties['ItemType'] == check_type + else: + return properties['PrimitiveItemType'] == check_type + return False + + def _walk_for_type(self, check_type): + """Walk the resources/properties looking for a specific type.""" + for class_name, properties in sorted(self.resources.items()): + for key, value in sorted(properties.iteritems()): + if self._check_type(check_type, value): + return True + for class_name, properties in sorted(self.properties.items()): + for key, value in sorted(properties.iteritems()): + if self._check_type(check_type, value): + return True + + return False + + def _get_property_type(self, value): + """Decode the 
values type and return a non-primitive property type.""" + if 'PrimitiveType' in value: + return None + if value['Type'] == 'List': + if 'ItemType' in value: + return value['ItemType'] + else: + return None + elif value['Type'] == 'Map': + return None + else: + # Non-primitive (Property) name + return value['Type'] + + def _get_type_list(self, props): + """Return a list of non-primitive types used by this object.""" + type_list = [] + for k, v in props.items(): + t = self._get_property_type(v) + if t is not None: + type_list.append(t) + return sorted(type_list) + + def _output_validators(self): + """Output common validator types based on usage.""" + if self._walk_for_type('Boolean'): + print "from .validators import boolean" + if self._walk_for_type('Integer'): + print "from .validators import integer" + vlist = self.override.get_validator_list() + for override in vlist: + if override.startswith('common/'): + override = override.lstrip('common/') + filename = "validators" + else: + filename = "%s_validators" % self.filename + print "from .%s import %s" % (filename, override) + + def _output_imports(self): + """Output imports for base troposphere class types.""" + if self.resources: + print "from . import AWSObject" + if self.properties: + print "from . 
import AWSProperty" + + def build_tree(self, name, props, resource_name=None): + """Build a tree of non-primitive typed dependency order.""" + n = Node(name, props, resource_name) + prop_type_list = self._get_type_list(props) + if not prop_type_list: + return n + prop_type_list = sorted(prop_type_list) + for prop_name in prop_type_list: + if prop_name == 'Tag': + continue + child = self.build_tree(prop_name, self.properties[prop_name]) + if child is not None: + n.add_child(child) + return n + + def output_tree(self, t, seen): + """Given a dependency tree of objects, output it in DFS order.""" + if not t: + return + for c in t.children: + self.output_tree(c, seen) + if t.name in seen: + return + seen[t.name] = True + if t.resource_name: + output_class(t.name, t.props, self.override, t.resource_name) + else: + output_class(t.name, t.props, self.override) + + def output(self): + """Output the generated source file.""" + print copyright_header % spec_version, + self._output_imports() + self._output_tags() + self._output_validators() + header = self.override.get_header() + if header: + print + print + print header.rstrip() + + seen = {} + for class_name, properties in sorted(self.resources.items()): + resource_name = self.resource_names[class_name] + t = self.build_tree(class_name, properties, resource_name) + self.output_tree(t, seen) + + +class Resources(object): + def __init__(self): + self.files = {} + + def _filename_map(self, name): + return name.split(":")[2].lower() + + def get_file(self, aws_name): + filename = self._filename_map(aws_name) + if filename not in self.files: + self.files[filename] = File(filename) + return self.files[filename] + + def output_file(self, name): + self.files[name].output() + + def output_files(self): + for name, file in sorted(self.files.items()): + file.output() def get_required(value): @@ -90,15 +376,22 @@ def get_type3(value): raise ValueError("get_type") -def output_class(class_name, properties, resource_name=None): +def 
output_class(class_name, properties, override, resource_name=None): print print + class_validator = override.get_class_validator(class_name) + mixin = "" + if class_validator: + mixin = "%s, " % class_validator + linebreak = "" + if len(mixin) > 28: + linebreak = "\n%s" % (' '*8) if resource_name: - print 'class %s(AWSObject):' % class_name + print 'class %s(%s%sAWSObject):' % (class_name, linebreak, mixin) print ' resource_type = "%s"' % resource_name print else: - print 'class %s(AWSProperty):' % class_name + print 'class %s(%s%sAWSProperty):' % (class_name, linebreak, mixin) # Output the props dict print ' props = {' @@ -108,13 +401,21 @@ def output_class(class_name, properties, resource_name=None): else: value_type = get_type(value) + custom_validator = override.get_validator(class_name, key) + if custom_validator is not None: + value_type = custom_validator + + required = override.get_required(class_name, key) + if required is None: + required = get_required(value) + # Wrap long names for pycodestyle if len(key) + len(value_type) < 55: print " '%s': (%s, %s)," % ( - key, value_type, get_required(value)) + key, value_type, required) else: print " '%s':\n (%s, %s)," % ( - key, value_type, get_required(value)) + key, value_type, required) print ' }' @@ -186,14 +487,32 @@ def process_file(filename, stub=False): if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--stub', action='store_true', default=False) + parser.add_argument('--name', action="store") parser.add_argument('filename', nargs='+') args = parser.parse_args() - if args.stub: - print copyright_header, - for f in args.filename: - process_file(f, stub=True) + f = open(args.filename[0]) + j = json.load(f) + + spec_version = j['ResourceSpecificationVersion'] + + r = Resources() + + for resource_name, resource_dict in sorted(j['ResourceTypes'].items()): + f = r.get_file(resource_name) + class_name = resource_name.split(':')[4] + properties = resource_dict['Properties'] + 
f.add_resource(class_name, properties, resource_name) + + for property_name, property_dict in sorted(j['PropertyTypes'].items()): + if property_name == "Tag": + continue + f = r.get_file(property_name) + class_name = property_name.split('.')[1] + properties = property_dict['Properties'] + f.add_property(class_name, properties) + + if args.name: + r.output_file(args.name) else: - print copyright_header, - for f in args.filename: - process_file(f) + r.output_files() diff --git a/tests/test_batch.py b/tests/test_batch.py new file mode 100644 index 000000000..13a3971ab --- /dev/null +++ b/tests/test_batch.py @@ -0,0 +1,33 @@ +import unittest +from troposphere.batch import ComputeEnvironment + + +class TestModel(unittest.TestCase): + def test_ComputeEnvironmentState(self): + ce = ComputeEnvironment( + "Compute", + ServiceRole="test", + State="DISABLED", + Type="test", + ) + ce.validate() + + ce = ComputeEnvironment( + "Compute", + ServiceRole="test", + State="ENABLED", + Type="test", + ) + ce.validate() + + with self.assertRaises(ValueError): + ce = ComputeEnvironment( + "Compute", + ServiceRole="test", + State="test", + Type="test", + ) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_serverless.py b/tests/test_serverless.py index 11c89172e..bda3fd0f9 100644 --- a/tests/test_serverless.py +++ b/tests/test_serverless.py @@ -1,6 +1,6 @@ import unittest from troposphere import Tags, Template -from troposphere.s3 import Filter, Rules, S3Key +from troposphere.s3 import NotificationFilter, FilterRule, S3KeyFilter from troposphere.serverless import ( Api, DeadLetterQueue, DeploymentPreference, Function, FunctionForPackaging, S3Event, S3Location, SimpleTable, @@ -157,10 +157,10 @@ def test_s3_filter(self): 'FileUpload', Bucket="bucket", Events=['s3:ObjectCreated:*'], - Filter=Filter(S3Key=S3Key( + Filter=NotificationFilter(S3Key=S3KeyFilter( Rules=[ - Rules(Name="prefix", Value="upload/"), - Rules(Name="suffix", Value=".txt"), + 
FilterRule(Name="prefix", Value="upload/"), + FilterRule(Name="suffix", Value=".txt"), ], )) ) diff --git a/troposphere/batch.py b/troposphere/batch.py index 70fb0da4b..be0464a03 100644 --- a/troposphere/batch.py +++ b/troposphere/batch.py @@ -1,106 +1,133 @@ -from . import AWSObject, AWSProperty -from .validators import exactly_one, integer, positive_integer +# Copyright (c) 2012-2019, Mark Peek +# All rights reserved. +# +# See LICENSE file for full license. +# +# *** Do not modify - this file is autogenerated *** +# Resource specification version: 2.20.0 + +from . import AWSObject +from . import AWSProperty +from .validators import boolean +from .validators import integer +from .batch_validators import LaunchTemplateSpecificationMixin +from .batch_validators import validate_environment_state +from .batch_validators import validate_queue_state + + +class LaunchTemplateSpecification( + LaunchTemplateSpecificationMixin, AWSProperty): + props = { + 'LaunchTemplateId': (basestring, False), + 'LaunchTemplateName': (basestring, False), + 'Version': (basestring, False), + } -class LaunchTemplateSpecification(AWSProperty): +class ComputeResources(AWSProperty): props = { - "LaunchTemplateId": (basestring, False), - "LaunchTemplateName": (basestring, False), - "Version": (basestring, False), + 'BidPercentage': (integer, False), + 'DesiredvCpus': (integer, False), + 'Ec2KeyPair': (basestring, False), + 'ImageId': (basestring, False), + 'InstanceRole': (basestring, True), + 'InstanceTypes': ([basestring], True), + 'LaunchTemplate': (LaunchTemplateSpecification, False), + 'MaxvCpus': (integer, True), + 'MinvCpus': (integer, True), + 'PlacementGroup': (basestring, False), + 'SecurityGroupIds': ([basestring], True), + 'SpotIamFleetRole': (basestring, False), + 'Subnets': ([basestring], True), + 'Tags': (dict, False), + 'Type': (basestring, True), } - def validate(self): - template_ids = [ - 'LaunchTemplateId', - 'LaunchTemplateName' - ] - exactly_one(self.__class__.__name__, 
self.properties, template_ids) +class ComputeEnvironment(AWSObject): + resource_type = "AWS::Batch::ComputeEnvironment" -class ComputeResources(AWSProperty): + props = { + 'ComputeEnvironmentName': (basestring, False), + 'ComputeResources': (ComputeResources, False), + 'ServiceRole': (basestring, True), + 'State': (validate_environment_state, False), + 'Type': (basestring, True), + } + +class Environment(AWSProperty): props = { - "SpotIamFleetRole": (basestring, False), - "MaxvCpus": (positive_integer, True), - "SecurityGroupIds": ([basestring], True), - "BidPercentage": (positive_integer, False), - "Type": (basestring, True), - "Subnets": ([basestring], True), - "MinvCpus": (positive_integer, True), - "LaunchTemplate": (LaunchTemplateSpecification, False), - "ImageId": (basestring, False), - "InstanceRole": (basestring, True), - "InstanceTypes": ([basestring], True), - "Ec2KeyPair": (basestring, False), - "PlacementGroup": (basestring, False), - "Tags": (dict, False), - "DesiredvCpus": (positive_integer, False) + 'Name': (basestring, False), + 'Value': (basestring, False), } class MountPoints(AWSProperty): + props = { + 'ContainerPath': (basestring, False), + 'ReadOnly': (boolean, False), + 'SourceVolume': (basestring, False), + } + +class Ulimit(AWSProperty): props = { - "ReadOnly": (bool, False), - "SourceVolume": (basestring, False), - "ContainerPath": (basestring, False) + 'HardLimit': (integer, True), + 'Name': (basestring, True), + 'SoftLimit': (integer, True), } class VolumesHost(AWSProperty): - props = { - "SourcePath": (basestring, False) + 'SourcePath': (basestring, False), } class Volumes(AWSProperty): - props = { - "Host": (VolumesHost, False), - "Name": (basestring, False) + 'Host': (VolumesHost, False), + 'Name': (basestring, False), } -class Environment(AWSProperty): - +class ContainerProperties(AWSProperty): props = { - "Value": (basestring, False), - "Name": (basestring, False) + 'Command': ([basestring], False), + 'Environment': ([Environment], 
False), + 'Image': (basestring, True), + 'InstanceType': (basestring, False), + 'JobRoleArn': (basestring, False), + 'Memory': (integer, True), + 'MountPoints': ([MountPoints], False), + 'Privileged': (boolean, False), + 'ReadonlyRootFilesystem': (boolean, False), + 'Ulimits': ([Ulimit], False), + 'User': (basestring, False), + 'Vcpus': (integer, True), + 'Volumes': ([Volumes], False), } -class Ulimit(AWSProperty): - +class NodeRangeProperty(AWSProperty): props = { - "SoftLimit": (positive_integer, True), - "HardLimit": (positive_integer, True), - "Name": (basestring, True) + 'Container': (ContainerProperties, False), + 'TargetNodes': (basestring, True), } -class ContainerProperties(AWSProperty): - +class NodeProperties(AWSProperty): props = { - "MountPoints": ([MountPoints], False), - "User": (basestring, False), - "Volumes": ([Volumes], False), - "Command": ([basestring], False), - "Memory": (positive_integer, True), - "Privileged": (bool, False), - "Environment": ([Environment], False), - "JobRoleArn": (basestring, False), - "ReadonlyRootFilesystem": (bool, False), - "Ulimits": ([Ulimit], False), - "Vcpus": (positive_integer, True), - "Image": (basestring, True) + 'MainNode': (integer, True), + 'NodeRangeProperties': ([NodeRangeProperty], True), + 'NumNodes': (integer, True), } class RetryStrategy(AWSProperty): - props = { - "Attempts": (positive_integer, False) + 'Attempts': (integer, False), } @@ -114,73 +141,29 @@ class JobDefinition(AWSObject): resource_type = "AWS::Batch::JobDefinition" props = { - 'ContainerProperties': (ContainerProperties, True), + 'ContainerProperties': (ContainerProperties, False), 'JobDefinitionName': (basestring, False), - 'Parameters': (dict, True), + 'NodeProperties': (NodeProperties, False), + 'Parameters': (dict, False), 'RetryStrategy': (RetryStrategy, False), 'Timeout': (Timeout, False), 'Type': (basestring, True), } -def validate_environment_state(environment_state): - """ Validate response type - :param environment_state: 
State of the environment - :return: The provided value if valid - """ - valid_states = [ - "ENABLED", - "DISABLED" - ] - if environment_state not in valid_states: - raise ValueError( - "{} is not a valid environment state".format(environment_state) - ) - return environment_state - - -class ComputeEnvironment(AWSObject): - resource_type = "AWS::Batch::ComputeEnvironment" - - props = { - "Type": (basestring, True), - "ServiceRole": (basestring, True), - "ComputeEnvironmentName": (basestring, False), - "ComputeResources": (ComputeResources, True), - "State": (validate_environment_state, False) - } - - class ComputeEnvironmentOrder(AWSProperty): - props = { - "ComputeEnvironment": (basestring, True), - "Order": (positive_integer, True) + 'ComputeEnvironment': (basestring, True), + 'Order': (integer, True), } -def validate_queue_state(queue_state): - """ Validate response type - :param queue_state: State of the queue - :return: The provided value if valid - """ - valid_states = [ - "ENABLED", - "DISABLED" - ] - if queue_state not in valid_states: - raise ValueError( - "{} is not a valid queue state".format(queue_state) - ) - return queue_state - - class JobQueue(AWSObject): resource_type = "AWS::Batch::JobQueue" props = { - "ComputeEnvironmentOrder": ([ComputeEnvironmentOrder], True), - "Priority": (positive_integer, True), - "State": (validate_queue_state, False), - "JobQueueName": (basestring, False) + 'ComputeEnvironmentOrder': ([ComputeEnvironmentOrder], True), + 'JobQueueName': (basestring, False), + 'Priority': (integer, True), + 'State': (validate_queue_state, False), } diff --git a/troposphere/batch.yaml b/troposphere/batch.yaml new file mode 100644 index 000000000..a7150b286 --- /dev/null +++ b/troposphere/batch.yaml @@ -0,0 +1,13 @@ +# AWS::Batch overrides +classes: + ComputeEnvironment: + State: + validator: validate_environment_state + ComputeResources: + Tags: + validator: dict + JobQueue: + State: + validator: validate_queue_state + 
LaunchTemplateSpecification: + validator: LaunchTemplateSpecificationMixin diff --git a/troposphere/batch_validators.py b/troposphere/batch_validators.py new file mode 100644 index 000000000..71cd818e9 --- /dev/null +++ b/troposphere/batch_validators.py @@ -0,0 +1,42 @@ +from .validators import exactly_one + + +class LaunchTemplateSpecificationMixin(object): + def validate(self): + template_ids = [ + 'LaunchTemplateId', + 'LaunchTemplateName' + ] + exactly_one(self.__class__.__name__, self.properties, template_ids) + + +def validate_environment_state(environment_state): + """ Validate response type + :param environment_state: State of the environment + :return: The provided value if valid + """ + valid_states = [ + "ENABLED", + "DISABLED" + ] + if environment_state not in valid_states: + raise ValueError( + "{} is not a valid environment state".format(environment_state) + ) + return environment_state + + +def validate_queue_state(queue_state): + """ Validate response type + :param queue_state: State of the queue + :return: The provided value if valid + """ + valid_states = [ + "ENABLED", + "DISABLED" + ] + if queue_state not in valid_states: + raise ValueError( + "{} is not a valid queue state".format(queue_state) + ) + return queue_state diff --git a/troposphere/codebuild.py b/troposphere/codebuild.py index c820c3930..ebe5793c7 100644 --- a/troposphere/codebuild.py +++ b/troposphere/codebuild.py @@ -162,7 +162,7 @@ def validate(self): ) auth = self.properties.get('Auth') - if auth is not None and source_type is not 'GITHUB': + if auth is not None and source_type != 'GITHUB': raise ValueError("SourceAuth: must only be defined when using " "'GITHUB' Source Type.") diff --git a/troposphere/s3.py b/troposphere/s3.py index b852528a1..03cad4c9f 100644 --- a/troposphere/s3.py +++ b/troposphere/s3.py @@ -1,12 +1,21 @@ -# Copyright (c) 2013, Bob Van Zant +# Copyright (c) 2012-2019, Mark Peek # All rights reserved. # # See LICENSE file for full license. 
-import warnings +# +# *** Do not modify - this file is autogenerated *** +# Resource specification version: 2.20.0 + +from . import AWSObject +from . import AWSProperty +from troposphere import Tags +from .validators import boolean +from .validators import integer +from .s3_validators import BucketMixin +from .s3_validators import RuleMixin +from .validators import positive_integer +from .s3_validators import s3_transfer_acceleration_status -from . import AWSHelperFn, AWSObject, AWSProperty, Tags -from .validators import boolean, positive_integer, s3_bucket_name -from .validators import s3_transfer_acceleration_status try: from awacs.aws import Policy @@ -24,80 +33,97 @@ LogDeliveryWrite = "LogDeliveryWrite" -class CorsRules(AWSProperty): +class AccelerateConfiguration(AWSProperty): props = { - 'AllowedHeaders': ([basestring], False), - 'AllowedMethods': ([basestring], True), - 'AllowedOrigins': ([basestring], True), - 'ExposedHeaders': ([basestring], False), - 'Id': (basestring, False), - 'MaxAge': (positive_integer, False), + 'AccelerationStatus': (s3_transfer_acceleration_status, True), } -class CorsConfiguration(AWSProperty): +class Destination(AWSProperty): props = { - 'CorsRules': ([CorsRules], True), + 'BucketAccountId': (basestring, False), + 'BucketArn': (basestring, True), + 'Format': (basestring, True), + 'Prefix': (basestring, False), } -class VersioningConfiguration(AWSProperty): +class DataExport(AWSProperty): props = { - 'Status': (basestring, False), + 'Destination': (Destination, True), + 'OutputSchemaVersion': (basestring, True), } -class AccelerateConfiguration(AWSProperty): +class StorageClassAnalysis(AWSProperty): props = { - 'AccelerationStatus': (s3_transfer_acceleration_status, True), + 'DataExport': (DataExport, False), } -class RedirectAllRequestsTo(AWSProperty): +class TagFilter(AWSProperty): props = { - 'HostName': (basestring, True), - 'Protocol': (basestring, False), + 'Key': (basestring, True), + 'Value': (basestring, True), } -class 
RedirectRule(AWSProperty): +class AnalyticsConfiguration(AWSProperty): props = { - 'HostName': (basestring, False), - 'HttpRedirectCode': (basestring, False), - 'Protocol': (basestring, False), - 'ReplaceKeyPrefixWith': (basestring, False), - 'ReplaceKeyWith': (basestring, False), + 'Id': (basestring, True), + 'Prefix': (basestring, False), + 'StorageClassAnalysis': (StorageClassAnalysis, True), + 'TagFilters': ([TagFilter], False), } -class RoutingRuleCondition(AWSProperty): +class ServerSideEncryptionByDefault(AWSProperty): props = { - 'HttpErrorCodeReturnedEquals': (basestring, False), - 'KeyPrefixEquals': (basestring, False), + 'KMSMasterKeyID': (basestring, False), + 'SSEAlgorithm': (basestring, True), } -class RoutingRule(AWSProperty): +class ServerSideEncryptionRule(AWSProperty): props = { - 'RedirectRule': (RedirectRule, True), - 'RoutingRuleCondition': (RoutingRuleCondition, False), + 'ServerSideEncryptionByDefault': + (ServerSideEncryptionByDefault, False), } -class WebsiteConfiguration(AWSProperty): +class BucketEncryption(AWSProperty): props = { - 'IndexDocument': (basestring, False), - 'ErrorDocument': (basestring, False), - 'RedirectAllRequestsTo': (RedirectAllRequestsTo, False), - 'RoutingRules': ([RoutingRule], False), + 'ServerSideEncryptionConfiguration': + ([ServerSideEncryptionRule], True), } -class LifecycleRuleTransition(AWSProperty): +class CorsRule(AWSProperty): props = { - 'StorageClass': (basestring, True), - 'TransitionDate': (basestring, False), - 'TransitionInDays': (positive_integer, False), + 'AllowedHeaders': ([basestring], False), + 'AllowedMethods': ([basestring], True), + 'AllowedOrigins': ([basestring], True), + 'ExposedHeaders': ([basestring], False), + 'Id': (basestring, False), + 'MaxAge': (integer, False), + } + + +class CorsConfiguration(AWSProperty): + props = { + 'CorsRules': ([CorsRule], True), + } + + +class InventoryConfiguration(AWSProperty): + props = { + 'Destination': (Destination, True), + 'Enabled': (boolean, 
True), + 'Id': (basestring, True), + 'IncludedObjectVersions': (basestring, True), + 'OptionalFields': ([basestring], False), + 'Prefix': (basestring, False), + 'ScheduleFrequency': (basestring, True), } @@ -114,14 +140,15 @@ class NoncurrentVersionTransition(AWSProperty): } -class TagFilter(AWSProperty): +class Transition(AWSProperty): props = { - 'Key': (basestring, True), - 'Value': (basestring, True), + 'StorageClass': (basestring, True), + 'TransitionDate': (basestring, False), + 'TransitionInDays': (integer, False), } -class LifecycleRule(AWSProperty): +class Rule(RuleMixin, AWSProperty): props = { 'AbortIncompleteMultipartUpload': (AbortIncompleteMultipartUpload, False), @@ -130,121 +157,94 @@ class LifecycleRule(AWSProperty): 'Id': (basestring, False), 'NoncurrentVersionExpirationInDays': (positive_integer, False), 'NoncurrentVersionTransition': (NoncurrentVersionTransition, False), - 'NoncurrentVersionTransitions': ([NoncurrentVersionTransition], False), + 'NoncurrentVersionTransitions': + ([NoncurrentVersionTransition], False), 'Prefix': (basestring, False), 'Status': (basestring, True), 'TagFilters': ([TagFilter], False), - 'Transition': (LifecycleRuleTransition, False), - 'Transitions': ([LifecycleRuleTransition], False) - } - - def validate(self): - if 'Transition' in self.properties: - if 'Transitions' not in self.properties: - # aws moved from a single transition to a list of them - # and deprecated 'Transition', so let's just move it to - # the new property and not annoy the user. - self.properties['Transitions'] = [ - self.properties.pop('Transition')] - else: - raise ValueError( - 'Cannot specify both "Transition" and "Transitions" ' - 'properties on S3 Bucket Lifecycle Rule. 
Please use ' - '"Transitions" since the former has been deprecated.') - - if 'NoncurrentVersionTransition' in self.properties: - if 'NoncurrentVersionTransitions' not in self.properties: - warnings.warn( - 'NoncurrentVersionTransition has been deprecated in ' - 'favour of NoncurrentVersionTransitions.' - ) - # Translate the old transition format to the new format - self.properties['NoncurrentVersionTransitions'] = [ - self.properties.pop('NoncurrentVersionTransition')] - else: - raise ValueError( - 'Cannot specify both "NoncurrentVersionTransition" and ' - '"NoncurrentVersionTransitions" properties on S3 Bucket ' - 'Lifecycle Rule. Please use ' - '"NoncurrentVersionTransitions" since the former has been ' - 'deprecated.') - - if 'ExpirationInDays' in self.properties and 'ExpirationDate' in \ - self.properties: - raise ValueError( - 'Cannot specify both "ExpirationDate" and "ExpirationInDays"' - ) + 'Transition': (Transition, False), + 'Transitions': ([Transition], False), + } class LifecycleConfiguration(AWSProperty): props = { - 'Rules': ([LifecycleRule], True), + 'Rules': ([Rule], True), } class LoggingConfiguration(AWSProperty): props = { - 'DestinationBucketName': (s3_bucket_name, False), + 'DestinationBucketName': (basestring, False), 'LogFilePrefix': (basestring, False), } -class Rules(AWSProperty): +class MetricsConfiguration(AWSProperty): + props = { + 'Id': (basestring, True), + 'Prefix': (basestring, False), + 'TagFilters': ([TagFilter], False), + } + + +class FilterRule(AWSProperty): props = { 'Name': (basestring, True), - 'Value': (basestring, True) + 'Value': (basestring, True), } -class S3Key(AWSProperty): +class S3KeyFilter(AWSProperty): props = { - 'Rules': ([Rules], True) + 'Rules': ([FilterRule], True), } -class Filter(AWSProperty): +class NotificationFilter(AWSProperty): props = { - 'S3Key': (S3Key, True) + 'S3Key': (S3KeyFilter, True), } -class LambdaConfigurations(AWSProperty): +class LambdaConfiguration(AWSProperty): props = { 'Event': 
(basestring, True), - 'Filter': (Filter, False), + 'Filter': (NotificationFilter, False), 'Function': (basestring, True), } -class QueueConfigurations(AWSProperty): +class QueueConfiguration(AWSProperty): props = { 'Event': (basestring, True), - 'Filter': (Filter, False), + 'Filter': (NotificationFilter, False), 'Queue': (basestring, True), } -class TopicConfigurations(AWSProperty): +class TopicConfiguration(AWSProperty): props = { 'Event': (basestring, True), - 'Filter': (Filter, False), + 'Filter': (NotificationFilter, False), 'Topic': (basestring, True), } -class MetricsConfiguration(AWSProperty): +class NotificationConfiguration(AWSProperty): props = { - 'Id': (basestring, True), - 'Prefix': (basestring, False), - 'TagFilters': ([TagFilter], False), + 'LambdaConfigurations': ([LambdaConfiguration], False), + 'QueueConfigurations': ([QueueConfiguration], False), + 'TopicConfigurations': ([TopicConfiguration], False), } -class NotificationConfiguration(AWSProperty): +class PublicAccessBlockConfiguration(AWSProperty): props = { - 'LambdaConfigurations': ([LambdaConfigurations], False), - 'QueueConfigurations': ([QueueConfigurations], False), - 'TopicConfigurations': ([TopicConfigurations], False), + 'BlockPublicAcls': (boolean, False), + 'BlockPublicPolicy': (boolean, False), + 'IgnorePublicAcls': (boolean, False), + 'RestrictPublicBuckets': (boolean, False), } @@ -260,7 +260,7 @@ class EncryptionConfiguration(AWSProperty): } -class ReplicationConfigurationRulesDestination(AWSProperty): +class ReplicationDestination(AWSProperty): props = { 'AccessControlTranslation': (AccessControlTranslation, False), 'Account': (basestring, False), @@ -282,142 +282,97 @@ class SourceSelectionCriteria(AWSProperty): } -class ReplicationConfigurationRules(AWSProperty): +class ReplicationRule(AWSProperty): props = { - 'Destination': (ReplicationConfigurationRulesDestination, True), + 'Destination': (ReplicationDestination, True), 'Id': (basestring, False), 'Prefix': (basestring, 
True), 'SourceSelectionCriteria': (SourceSelectionCriteria, False), - 'Status': (basestring, True) + 'Status': (basestring, True), } class ReplicationConfiguration(AWSProperty): props = { 'Role': (basestring, True), - 'Rules': ([ReplicationConfigurationRules], True) - } - - -class Destination(AWSProperty): - props = { - 'BucketAccountId': (basestring, False), - 'BucketArn': (basestring, True), - 'Format': (basestring, True), - 'Prefix': (basestring, False), - } - - -class DataExport(AWSProperty): - props = { - 'Destination': (Destination, True), - 'OutputSchemaVersion': (basestring, True), + 'Rules': ([ReplicationRule], True), } -class StorageClassAnalysis(AWSProperty): - props = { - 'DataExport': (DataExport, False), - } - - -class AnalyticsConfiguration(AWSProperty): +class VersioningConfiguration(AWSProperty): props = { - 'Id': (basestring, True), - 'Prefix': (basestring, False), - 'StorageClassAnalysis': (StorageClassAnalysis, True), - 'TagFilters': ([TagFilter], False), + 'Status': (basestring, True), } -class ServerSideEncryptionByDefault(AWSProperty): +class RedirectAllRequestsTo(AWSProperty): props = { - 'KMSMasterKeyID': (basestring, False), - 'SSEAlgorithm': (basestring, True), + 'HostName': (basestring, True), + 'Protocol': (basestring, False), } -class ServerSideEncryptionRule(AWSProperty): +class RedirectRule(AWSProperty): props = { - 'ServerSideEncryptionByDefault': - (ServerSideEncryptionByDefault, False), + 'HostName': (basestring, False), + 'HttpRedirectCode': (basestring, False), + 'Protocol': (basestring, False), + 'ReplaceKeyPrefixWith': (basestring, False), + 'ReplaceKeyWith': (basestring, False), } -class BucketEncryption(AWSProperty): +class RoutingRuleCondition(AWSProperty): props = { - 'ServerSideEncryptionConfiguration': - ([ServerSideEncryptionRule], True), + 'HttpErrorCodeReturnedEquals': (basestring, False), + 'KeyPrefixEquals': (basestring, False), } -class InventoryConfiguration(AWSProperty): +class RoutingRule(AWSProperty): props = { 
- 'Destination': (Destination, True), - 'Enabled': (boolean, True), - 'Id': (basestring, True), - 'IncludedObjectVersions': (basestring, True), - 'OptionalFields': ([basestring], True), - 'Prefix': (basestring, False), - 'ScheduleFrequency': (basestring, True), + 'RedirectRule': (RedirectRule, True), + 'RoutingRuleCondition': (RoutingRuleCondition, False), } -class PublicAccessBlockConfiguration(AWSProperty): +class WebsiteConfiguration(AWSProperty): props = { - 'BlockPublicAcls': (boolean, False), - 'BlockPublicPolicy': (boolean, False), - 'IgnorePublicAcls': (boolean, False), - 'RestrictPublicBuckets': (boolean, False), + 'ErrorDocument': (basestring, False), + 'IndexDocument': (basestring, False), + 'RedirectAllRequestsTo': (RedirectAllRequestsTo, False), + 'RoutingRules': ([RoutingRule], False), } -class Bucket(AWSObject): +class Bucket(BucketMixin, AWSObject): resource_type = "AWS::S3::Bucket" props = { - 'AccessControl': (basestring, False), 'AccelerateConfiguration': (AccelerateConfiguration, False), + 'AccessControl': (basestring, False), 'AnalyticsConfigurations': ([AnalyticsConfiguration], False), 'BucketEncryption': (BucketEncryption, False), - 'BucketName': (s3_bucket_name, False), + 'BucketName': (basestring, False), 'CorsConfiguration': (CorsConfiguration, False), 'InventoryConfigurations': ([InventoryConfiguration], False), 'LifecycleConfiguration': (LifecycleConfiguration, False), 'LoggingConfiguration': (LoggingConfiguration, False), 'MetricsConfigurations': ([MetricsConfiguration], False), 'NotificationConfiguration': (NotificationConfiguration, False), - 'PublicAccessBlockConfiguration': (PublicAccessBlockConfiguration, - False), + 'PublicAccessBlockConfiguration': + (PublicAccessBlockConfiguration, False), 'ReplicationConfiguration': (ReplicationConfiguration, False), 'Tags': (Tags, False), + 'VersioningConfiguration': (VersioningConfiguration, False), 'WebsiteConfiguration': (WebsiteConfiguration, False), - 'VersioningConfiguration': 
(VersioningConfiguration, False)
     }
 
-    access_control_types = [
-        Private,
-        PublicRead,
-        PublicReadWrite,
-        AuthenticatedRead,
-        BucketOwnerRead,
-        BucketOwnerFullControl,
-        LogDeliveryWrite,
-    ]
-
-    def validate(self):
-        access_control = self.properties.get('AccessControl')
-        if access_control is not None and \
-                not isinstance(access_control, AWSHelperFn):
-            if access_control not in self.access_control_types:
-                raise ValueError('AccessControl must be one of "%s"' % (
-                    ', '.join(self.access_control_types)))
-
 
 class BucketPolicy(AWSObject):
     resource_type = "AWS::S3::BucketPolicy"
 
     props = {
         'Bucket': (basestring, True),
-        'PolicyDocument': (policytypes, True),
+        'PolicyDocument': (policytypes, True),
     }
diff --git a/troposphere/s3.yaml b/troposphere/s3.yaml
new file mode 100644
index 000000000..8a8aa7330
--- /dev/null
+++ b/troposphere/s3.yaml
@@ -0,0 +1,40 @@
+# AWS::S3 overrides
+header: |
+  try:
+      from awacs.aws import Policy
+
+      policytypes = (dict, Policy)
+  except ImportError:
+      policytypes = dict,
+
+  Private = "Private"
+  PublicRead = "PublicRead"
+  PublicReadWrite = "PublicReadWrite"
+  AuthenticatedRead = "AuthenticatedRead"
+  BucketOwnerRead = "BucketOwnerRead"
+  BucketOwnerFullControl = "BucketOwnerFullControl"
+  LogDeliveryWrite = "LogDeliveryWrite"
+classes:
+  CorsRule:
+    MaxAge:
+      validator: common/positive_integer
+  AccelerateConfiguration:
+    AccelerationStatus:
+      validator: s3_transfer_acceleration_status
+  Transition:
+    TransitionInDays:
+      validator: common/positive_integer
+  AbortIncompleteMultipartUpload:
+    DaysAfterInitiation:
+      validator: common/positive_integer
+  NoncurrentVersionTransition:
+    TransitionInDays:
+      validator: common/positive_integer
+  Bucket:
+    validator: BucketMixin
+  Rule:
+    validator: RuleMixin
+    ExpirationInDays:
+      validator: common/positive_integer
+    NoncurrentVersionExpirationInDays:
+      validator: common/positive_integer
diff --git a/troposphere/s3_validators.py b/troposphere/s3_validators.py
new file mode 100644 
index 000000000..33f62a56d
--- /dev/null
+++ b/troposphere/s3_validators.py
@@ -0,0 +1,98 @@
+# Copyright (c) 2013, Bob Van Zant
+# All rights reserved.
+#
+# See LICENSE file for full license.
+
+import warnings
+from re import compile
+from troposphere import AWSHelperFn
+
+
+class RuleMixin(object):
+    def validate(self):
+        if 'Transition' in self.properties:
+            if 'Transitions' not in self.properties:
+                # aws moved from a single transition to a list of them
+                # and deprecated 'Transition', so let's just move it to
+                # the new property and not annoy the user.
+                self.properties['Transitions'] = [
+                    self.properties.pop('Transition')]
+            else:
+                raise ValueError(
+                    'Cannot specify both "Transition" and "Transitions" '
+                    'properties on S3 Bucket Lifecycle Rule. Please use '
+                    '"Transitions" since the former has been deprecated.')
+
+        if 'NoncurrentVersionTransition' in self.properties:
+            if 'NoncurrentVersionTransitions' not in self.properties:
+                warnings.warn(
+                    'NoncurrentVersionTransition has been deprecated in '
+                    'favour of NoncurrentVersionTransitions.'
+                )
+                # Translate the old transition format to the new format
+                self.properties['NoncurrentVersionTransitions'] = [
+                    self.properties.pop('NoncurrentVersionTransition')]
+            else:
+                raise ValueError(
+                    'Cannot specify both "NoncurrentVersionTransition" and '
+                    '"NoncurrentVersionTransitions" properties on S3 Bucket '
+                    'Lifecycle Rule. 
Please use ' + '"NoncurrentVersionTransitions" since the former has been ' + 'deprecated.') + + if 'ExpirationInDays' in self.properties and 'ExpirationDate' in \ + self.properties: + raise ValueError( + 'Cannot specify both "ExpirationDate" and "ExpirationInDays"' + ) + + +class BucketMixin(object): + access_control_types = [ + "Private", + "PublicRead", + "PublicReadWrite", + "AuthenticatedRead", + "BucketOwnerRead", + "BucketOwnerFullControl", + "LogDeliveryWrite", + ] + + def validate(self): + access_control = self.properties.get('AccessControl') + if access_control is not None and \ + not isinstance(access_control, AWSHelperFn): + if access_control not in self.access_control_types: + raise ValueError('AccessControl must be one of "%s"' % ( + ', '.join(self.access_control_types))) + + +def s3_bucket_name(b): + + # consecutive periods not allowed + + if '..' in b: + raise ValueError("%s is not a valid s3 bucket name" % b) + + # IP addresses not allowed + + ip_re = compile(r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$') + if ip_re.match(b): + raise ValueError("%s is not a valid s3 bucket name" % b) + + s3_bucket_name_re = compile(r'^[a-z\d][a-z\d\.-]{1,61}[a-z\d]$') + if s3_bucket_name_re.match(b): + return b + else: + raise ValueError("%s is not a valid s3 bucket name" % b) + + +def s3_transfer_acceleration_status(value): + valid_status = ['Enabled', 'Suspended'] + if value not in valid_status: + raise ValueError( + 'AccelerationStatus must be one of: "%s"' % ( + ', '.join(valid_status) + ) + ) + return value diff --git a/troposphere/serverless.py b/troposphere/serverless.py index 59a69e2c0..f1a72351d 100644 --- a/troposphere/serverless.py +++ b/troposphere/serverless.py @@ -9,7 +9,7 @@ from .apigateway import AccessLogSetting, CanarySetting, MethodSetting from .awslambda import Environment, VPCConfig, validate_memory_size from .dynamodb import ProvisionedThroughput, SSESpecification -from .s3 import Filter +from .s3 import NotificationFilter from .validators import 
exactly_one, positive_integer try: from awacs.aws import PolicyDocument @@ -231,7 +231,7 @@ class S3Event(AWSObject): props = { 'Bucket': (basestring, True), 'Events': (list, True), - 'Filter': (Filter, False) + 'Filter': (NotificationFilter, False) }