Skip to content

Commit

Permalink
Tag AWS resources created with Terraform (#1552, PR #1701)
Browse files Browse the repository at this point in the history
  • Loading branch information
hannes-ucsc committed Dec 11, 2020
2 parents 42f6f7c + 60ed45f commit c0c29ad
Show file tree
Hide file tree
Showing 30 changed files with 114,792 additions and 92 deletions.
1 change: 1 addition & 0 deletions .gitattributes
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
/test/indexer/data/* linguist-generated=true
/attic/* linguist-generated=true
/docs/*.svg linguist-generated=true
/terraform/_schema.json linguist-generated=true
2 changes: 1 addition & 1 deletion .gitlab-ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ deploy:
extends: .base
stage: deploy
script:
- terraform version
- make -C terraform check_schema
- make package
- make auto_deploy
- make create
Expand Down
8 changes: 6 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -367,9 +367,9 @@ the bucket, you may want to include the region name at the end of the bucket
name. That way you can have consistent bucket names across regions.
Create a Route 53 hosted zone for the Azul service and indexer. Multiple
deployments can share a hosted zone but they don't have to. The name of the
hosted zone is configured with `AZUL_DOMAIN_NAME`. `make deploy` will
automatically provision record sets in the configured zone but it will not
create the zone itself or register the domain name it is associated with.
Optionally create another hosted zone for the URL shortener. The URLs produced
Expand All @@ -379,6 +379,10 @@ supported to use the same zone for both `AZUL_URL_REDIRECT_BASE_DOMAIN_NAME` and
`AZUL_DOMAIN_NAME` but this was not tested. The shortener zone can be a
subdomain of the main Azul zone but it doesn't have to be.
The hosted zone(s) should be configured with tags for cost tracking. A list of
tags that should be provisioned is noted in
[src/azul/deployment.py:tags](src/azul/deployment.py).
If you intend to set up a Gitlab instance for CI/CD of your Azul deployments, an
EBS volume needs to be created as well. See [gitlab.tf.json.template.py] and the
[section on CI/CD](#9-continuous-deployment-and-integration) for details.
Expand Down
52 changes: 26 additions & 26 deletions scripts/prepare_lambda_deployment.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,6 @@
)
import json
import logging
from pathlib import (
Path,
)
import shutil
import sys

Expand All @@ -18,6 +15,10 @@
from azul.logging import (
configure_script_logging,
)
from azul.terraform import (
chalice,
populate_tags,
)

log = logging.getLogger(__name__)

Expand All @@ -44,7 +45,7 @@ def transform_tf(input_json):

# Inject ES-specific environment from variables set by Terraform.
for var, val in config.es_endpoint_env(
es_endpoint=('${var.es_endpoint[0]}', '${var.es_endpoint[1]}'),
es_endpoint='${var.es_endpoint[0]}:${var.es_endpoint[1]}',
es_instance_count='${var.es_instance_count}'
).items():
func['environment']['variables'][var] = val
Expand All @@ -68,30 +69,29 @@ def patch_cloudwatch_resource(resource_type_name, property_name):

def main(argv):
    """
    Prepare the Terraform config generated by `chalice package --pkg-format
    terraform` for the given lambda and copy it, along with the deployment
    package, into the terraform/ directory.
    """
    parser = ArgumentParser(
        description='Prepare the Terraform config generated by '
                    '`chalice package --pkg-format terraform` '
                    'and copy it into the terraform/ directory.'
    )
    parser.add_argument('lambda_name',
                        help='The name of the Lambda function to prepare.')
    options = parser.parse_args(argv)
    lambda_name = options.lambda_name
    src_dir = chalice.package_dir(lambda_name)
    dst_dir = chalice.module_dir(lambda_name)
    # Only the leaf directory is created here; its parent must already exist
    dst_dir.mkdir(exist_ok=True)

    # Copy the deployment package (ZIP) verbatim
    # Note: `d`, not `dir`, to avoid shadowing the builtin
    args = [d / chalice.package_zip_name for d in (src_dir, dst_dir)]
    log.info('Copying %s to %s', *args)
    shutil.copyfile(*args)

    # Transform the generated Terraform config and write it atomically so
    # that an interrupted run never leaves a truncated config behind
    src_tf, dst_tf = [d / chalice.tf_config_name for d in (src_dir, dst_dir)]
    log.info('Transforming %s to %s', src_tf, dst_tf)
    with open(src_tf) as f:
        tf_config = json.load(f)
    tf_config = populate_tags(chalice.patch_resource_names(transform_tf(tf_config)))
    with write_file_atomically(dst_tf) as f:
        json.dump(tf_config, f, indent=4)


if __name__ == '__main__':
Expand Down
18 changes: 18 additions & 0 deletions scripts/rename_resources.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import json
import logging
import os
import subprocess
Expand All @@ -9,6 +10,9 @@
from azul.logging import (
configure_script_logging,
)
from azul.terraform import (
chalice,
)

log = logging.getLogger(__name__)

Expand All @@ -35,6 +39,20 @@
}


def chalice_renamed():
    """
    Yield pairs of (old, new) fully qualified Terraform resource addresses
    for the resources in each lambda's generated Chalice config.
    """
    for lambda_name in ('indexer', 'service'):
        tf_config_path = chalice.package_dir(lambda_name) / chalice.tf_config_name
        with open(tf_config_path) as f:
            # Keep the parsed config in its own variable instead of
            # clobbering the path variable with non-path content
            tf_config = json.load(f)
        mapping = chalice.resource_name_mapping(tf_config)
        for (resource_type, name), new_name in mapping.items():
            prefix = f'module.chalice_{lambda_name}.{resource_type}.'
            yield prefix + name, prefix + new_name


# Fold the renames derived from the Chalice configs into the static mapping
for _old_name, _new_name in chalice_renamed():
    renamed[_old_name] = _new_name


def terraform_state(command: str, *args: str) -> bytes:
proc = subprocess.run(['terraform', 'state', command, *args],
cwd=os.path.join(config.project_root, 'terraform'),
Expand Down
40 changes: 40 additions & 0 deletions scripts/terraform_schema.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
"""
Save Terraform version and schema information to the file specified
in AZUL_TRACKED_SCHEMA_PATH or verify that the Terraform information
in that file is up-to-date.
"""
import argparse
import logging
import sys

from azul.logging import (
configure_script_logging,
)
from azul.terraform import (
terraform,
)

log = logging.getLogger(__name__)


def check_schema() -> None:
    """
    Compare the version information recorded in the cached Terraform schema
    against the current Terraform versions and raise if they differ.
    """
    schema = terraform.schema
    if schema.versions == terraform.versions:
        return
    raise RuntimeError(f"Cached Terraform schema is out of date. "
                       f"Run '{sys.executable} {__file__} update' "
                       f"and commit {schema.path}")


if __name__ == '__main__':
    configure_script_logging()
    # Each sub-command name maps to the callable that implements it
    commands = {
        'update': terraform.update_schema,
        'check': check_schema,
    }
    # https://youtrack.jetbrains.com/issue/PY-41806
    # noinspection PyTypeChecker
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )
    parser.add_argument('command', choices=commands)
    command = commands[parser.parse_args().command]
    command()
51 changes: 30 additions & 21 deletions src/azul/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,10 +79,21 @@ def es_endpoint(self) -> Optional[Netloc]:
host, _, port = es_endpoint.partition(':')
return host, int(port)

def es_endpoint_env(self, es_endpoint: Netloc, es_instance_count: Union[int, str]) -> Mapping[str, str]:
host, port = es_endpoint
def es_endpoint_env(self,
                    es_endpoint: Union[Netloc, str],
                    es_instance_count: Union[int, str]
                    ) -> Mapping[str, str]:
    """
    Return the environment variables carrying the Elasticsearch endpoint
    and instance count. A (host, port) tuple endpoint is normalized to the
    'host:port' string form; a string endpoint is used as is.
    """
    if isinstance(es_endpoint, tuple):
        host, port = es_endpoint
        assert isinstance(host, str), host
        assert isinstance(port, int), port
        es_endpoint = f'{host}:{port}'
    elif isinstance(es_endpoint, str):
        pass
    else:
        assert False, es_endpoint
    # Note: only the normalized string form goes into the mapping; the
    # stale entry referencing host/port (unbound on the str path) is gone
    return {
        self._es_endpoint_env_name: es_endpoint,
        self._es_instance_count_env_name: str(es_instance_count)
    }

Expand Down Expand Up @@ -149,11 +160,11 @@ def data_browser_domain(self):

@property
def data_browser_name(self):
    # Resource name composed of the configured prefix and deployment stage
    return f'{self.resource_prefix}-data-browser-{self.deployment_stage}'

@property
def data_portal_name(self):
    # Resource name composed of the configured prefix and deployment stage
    return f'{self.resource_prefix}-data-portal-{self.deployment_stage}'

@property
def dss_endpoint(self) -> str:
Expand Down Expand Up @@ -308,7 +319,7 @@ def _parse_principals(self, accounts) -> MutableMapping[str, List[str]]:
return result

@property
def resource_prefix(self):
    """
    The validated common prefix for resource names, taken from the
    AZUL_RESOURCE_PREFIX environment variable.
    """
    prefix = os.environ['AZUL_RESOURCE_PREFIX']
    self.validate_prefix(prefix)
    return prefix
def qualified_resource_name(self, resource_name, suffix='', stage=None):
    """
    Return '<resource_prefix>-<resource_name>-<stage><suffix>', defaulting
    the stage to the current deployment stage when none is given.
    """
    self._validate_term(resource_name)
    if stage is None:
        stage = self.deployment_stage
    return f"{self.resource_prefix}-{resource_name}-{stage}{suffix}"

def unqualified_resource_name(self,
                              qualified_resource_name: str,
                              suffix: str = ''
                              ) -> Tuple[str, str]:
    """
    Split a qualified resource name into its resource name and deployment
    stage components, after stripping the given suffix.

    >>> config.unqualified_resource_name('azul-foo-dev')
    ('foo', 'dev')

    >>> config.unqualified_resource_name('azul-foo')
    Traceback (most recent call last):
    ...
    azul.RequirementError: ['azul', 'foo']

    >>> config.unqualified_resource_name('azul-object_versions-dev')
    ('object_versions', 'dev')

    >>> config.unqualified_resource_name('azul-object-versions-dev')
    Traceback (most recent call last):
    ...
    azul.RequirementError: ['azul', 'object', 'versions', 'dev']
    """
    require(qualified_resource_name.endswith(suffix))
    if len(suffix) > 0:
        qualified_resource_name = qualified_resource_name[:-len(suffix)]
    components = qualified_resource_name.split('-')
    # Pass the components so a failed requirement reports what was seen
    require(len(components) == 3, components)
    prefix, resource_name, deployment_stage = components
    require(prefix == self.resource_prefix)
    return resource_name, deployment_stage

def unqualified_resource_name_or_none(self, qualified_resource_name: str, suffix: str = '') -> tuple:
def unqualified_resource_name_or_none(self,
qualified_resource_name: str,
suffix: str = ''
) -> Tuple[Optional[str], Optional[str]]:
"""
>>> config.unqualified_resource_name_or_none('azul-foo-dev')
('foo', 'dev')
>>> config.unqualified_resource_name_or_none('invalid-foo-dev')
(None, None)
:param qualified_resource_name:
:param suffix:
:return:
"""
try:
return self.unqualified_resource_name(qualified_resource_name, suffix=suffix)
Expand Down
22 changes: 0 additions & 22 deletions src/azul/deployment.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,9 +30,6 @@
cached_property,
config,
)
from azul.template import (
emit,
)
from azul.types import (
JSON,
)
Expand Down Expand Up @@ -376,22 +373,3 @@ def resource(self, *args, **kwargs):
aws = AWS()
del AWS
del _cache


def _sanitize_tf(tf_config: JSON) -> JSON:
    """
    Drop empty top-level sections from a Terraform config. This avoids
    errors like

        Error: Missing block label

        on api_gateway.tf.json line 12:
        12: "resource": []

        At least one object property is required, whose name represents
        the resource block's type.
    """
    sanitized = {}
    for section, content in tf_config.items():
        if content:
            sanitized[section] = content
    return sanitized


def emit_tf(tf_config: Optional[JSON]):
    """
    Emit the given Terraform config after removing its empty top-level
    sections. A None config is passed to emit() unchanged.
    """
    if tf_config is None:
        return emit(tf_config)
    else:
        return emit(_sanitize_tf(tf_config))
5 changes: 5 additions & 0 deletions src/azul/modules.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,3 +43,8 @@ def load_app_module(lambda_name, **module_attributes):
path = os.path.join(config.project_root, 'lambdas', lambda_name, 'app.py')
# Changing the module name here will break doctest discoverability
return load_module(path, f'lambdas.{lambda_name}.app', module_attributes)


def load_script(script_name: str):
    """Load and return the module for the named script in scripts/."""
    file_name = f'{script_name}.py'
    path = os.path.join(config.project_root, 'scripts', file_name)
    return load_module(path, script_name)
Loading

0 comments on commit c0c29ad

Please sign in to comment.